diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index bee5dccbf88..d2c6d89093a 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -10,4 +10,4 @@ jobs: runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 - - uses: AlexanderDokuchaev/md-dead-link-check@v0.6 + - uses: AlexanderDokuchaev/md-dead-link-check@v0.8 diff --git a/.github/workflows/pre-commit-linters.yml b/.github/workflows/pre-commit-linters.yml index 0c911b8f0c6..288566d66d2 100644 --- a/.github/workflows/pre-commit-linters.yml +++ b/.github/workflows/pre-commit-linters.yml @@ -23,4 +23,4 @@ jobs: runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 - - uses: AlexanderDokuchaev/md-dead-link-check@v0.6 + - uses: AlexanderDokuchaev/md-dead-link-check@v0.8 diff --git a/.mypy.ini b/.mypy.ini index 42b6329f72c..015bbb5f890 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -1,5 +1,5 @@ [mypy] -files = nncf/common/sparsity +files = nncf/common/sparsity, nncf/common/graph follow_imports = silent strict = True diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 11dd747ce2e..34a3e5a293f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,7 +17,7 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.1.3 + rev: v0.3.7 hooks: - id: ruff diff --git a/README.md b/README.md index b8feafc0249..24978e23c4a 100644 --- a/README.md +++ b/README.md @@ -127,6 +127,8 @@ quantized_model = nncf.quantize(model, calibration_dataset) ``` +**NOTE**: If the Post-Training Quantization algorithm cannot reach your quality requirements, you can fine-tune the quantized PyTorch model. An example of a Quantization-Aware Training pipeline for a PyTorch model can be found [here](examples/quantization_aware_training/torch/resnet18/README.md). +
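A minimal sketch of the fine-tuning flow the note above refers to (not taken from the linked example): it assumes a torchvision ResNet-18, hypothetical `train_loader`/`calibration_loader` DataLoaders, and a cross-entropy objective. The model returned by `nncf.quantize()` for the PyTorch backend is still a regular `torch.nn.Module`, so it can be trained directly; see the linked README for the complete pipeline.

```python
import nncf
import torch
from torchvision import models

# Assumed setup (placeholders): `train_loader` and `calibration_loader` are ordinary DataLoaders.
model = models.resnet18(weights=models.ResNet18_Weights.DEFAULT)
calibration_dataset = nncf.Dataset(calibration_loader, lambda item: item[0])

# Post-training quantization first ...
quantized_model = nncf.quantize(model, calibration_dataset)

# ... then a short quantization-aware fine-tuning loop to recover accuracy.
optimizer = torch.optim.SGD(quantized_model.parameters(), lr=1e-4, momentum=0.9)
criterion = torch.nn.CrossEntropyLoss()
quantized_model.train()
for images, labels in train_loader:
    optimizer.zero_grad()
    loss = criterion(quantized_model(images), labels)
    loss.backward()
    optimizer.step()
```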
TensorFlow @@ -273,44 +275,44 @@ For a quicker start with NNCF-powered compression, try sample notebooks and scri A collection of ready-to-run Jupyter* notebooks tutorials and demos are available to explain and display NNCF compression algorithms for optimizing models for inference with the OpenVINO Toolkit. -| Notebook Tutorial Name | Compression Algorithm | Backend | Domain | -|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------:|:----------:|:-----------------------------------:| -| [BERT Quantization](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/105-language-quantize-bert)
[![Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/openvinotoolkit/openvino_notebooks/blob/main/notebooks/105-language-quantize-bert/105-language-quantize-bert.ipynb) | Post-Training Quantization | OpenVINO | NLP | -| [MONAI Segmentation Model Quantization](https://github.com/openvinotoolkit/openvino_notebooks/blob/main/notebooks/110-ct-segmentation-quantize)
[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/openvinotoolkit/openvino_notebooks/HEAD?filepath=notebooks%2F110-ct-segmentation-quantize%2F110-ct-scan-live-inference.ipynb) | Post-Training Quantization | OpenVINO | Segmentation | -| [PyTorch Model Quantization](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/112-pytorch-post-training-quantization-nncf) | Post-Training Quantization | PyTorch | Image Classification | -| [TensorFlow Model Quantization](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/301-tensorflow-training-openvino) | Post-Training Quantization | Tensorflow | Image Classification | -| [Quantization with Accuracy Control](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/122-quantizing-model-with-accuracy-control) | Post-Training Quantization with Accuracy Control | OpenVINO | Speech-to-Text,
Object Detection | -| [PyTorch Training-Time Compression](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/302-pytorch-quantization-aware-training) | Training-Time Compression | PyTorch | Image Classification | -| [TensorFlow Training-Time Compression](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/301-tensorflow-training-openvino) | Training-Time Compression | Tensorflow | Image Classification | -| [Joint Pruning, Quantization and Distillation for BERT](https://github.com/openvinotoolkit/openvino_notebooks/blob/main/notebooks/116-sparsity-optimization) | Joint Pruning, Quantization and Distillation | OpenVINO | NLP | +| Notebook Tutorial Name | Compression Algorithm | Backend | Domain | +|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------:|:----------:|:-----------------------------------:| +| [BERT Quantization](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/language-quantize-bert)
[![Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/openvinotoolkit/openvino_notebooks/blob/latest/notebooks/language-quantize-bert/language-quantize-bert.ipynb) | Post-Training Quantization | OpenVINO | NLP | +| [MONAI Segmentation Model Quantization](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/ct-segmentation-quantize)
[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/openvinotoolkit/openvino_notebooks/HEAD?filepath=notebooks%2Fct-segmentation-quantize%2Fct-scan-live-inference.ipynb) | Post-Training Quantization | OpenVINO | Segmentation | +| [PyTorch Model Quantization](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/pytorch-post-training-quantization-nncf) | Post-Training Quantization | PyTorch | Image Classification | +| [TensorFlow Model Quantization](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/tensorflow-training-openvino) | Post-Training Quantization | Tensorflow | Image Classification | +| [Quantization with Accuracy Control](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/quantizing-model-with-accuracy-control) | Post-Training Quantization with Accuracy Control | OpenVINO | Speech-to-Text,
Object Detection | +| [PyTorch Training-Time Compression](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/pytorch-quantization-aware-training) | Training-Time Compression | PyTorch | Image Classification | +| [TensorFlow Training-Time Compression](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/tensorflow-training-openvino) | Training-Time Compression | Tensorflow | Image Classification | +| [Joint Pruning, Quantization and Distillation for BERT](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/sparsity-optimization) | Joint Pruning, Quantization and Distillation | OpenVINO | NLP | Below is a list of notebooks demonstrating OpenVINO conversion and inference together with NNCF compression for models from various domains. -| Demo Model | Compression Algorithm | Backend | Domain | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------:|:---------:|:--------------------------------------------------------------------:| -| [YOLOv8](https://github.com/openvinotoolkit/openvino_notebooks/blob/main/notebooks/230-yolov8-optimization)
[![Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/openvinotoolkit/openvino_notebooks/blob/main/notebooks/230-yolov8-optimization/230-yolov8-object-detection.ipynb) | Post-Training Quantization | OpenVINO | Object Detection,
KeyPoint Detection,
Instance Segmentation | -| [YOLOv7](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/226-yolov7-optimization) | Post-Training Quantization | OpenVINO | Object Detection | -| [EfficientSAM](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/274-efficient-sam) | Post-Training Quantization | OpenVINO | Image Segmentation | -| [Segment Anything Model](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/237-segment-anything) | Post-Training Quantization | OpenVINO | Image Segmentation | -| [OneFormer](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/249-oneformer-segmentation) | Post-Training Quantization | OpenVINO | Image Segmentation | -| [InstructPix2Pix](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/231-instruct-pix2pix-image-editing) | Post-Training Quantization | OpenVINO | Image-to-Image | -| [CLIP](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/228-clip-zero-shot-image-classification) | Post-Training Quantization | OpenVINO | Image-to-Text | -| [BLIP](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/233-blip-visual-language-processing) | Post-Training Quantization | OpenVINO | Image-to-Text | -| [Segmind-VegaRT](https://github.com/openvinotoolkit/openvino_notebooks/blob/main/notebooks/248-stable-diffusion-xl/248-segmind-vegart.ipynb) | Post-Training Quantization | OpenVINO | Text-to-Image | -| [Latent Consistency Model](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/263-latent-consistency-models-image-generation) | Post-Training Quantization | OpenVINO | Text-to-Image | -| [Würstchen](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/265-wuerstchen-image-generation) | Post-Training Quantization | OpenVINO | Text-to-Image | -| [ControlNet QR Code Monster](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/264-qrcode-monster) | Post-Training Quantization | OpenVINO | Text-to-Image | -| [SDXL-turbo](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/271-sdxl-turbo) | Post-Training Quantization | OpenVINO | Text-to-Image,
Image-to-Image | -| [DeepFloyd IF](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/238-deepfloyd-if) | Post-Training Quantization,
Weight Compression | OpenVINO | Text-to-Image,
Image-to-Image | -| [ImageBind](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/239-image-bind) | Post-Training Quantization | OpenVINO | Multi-Modal Retrieval | -| [Distil-Whisper](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/267-distil-whisper-asr) | Post-Training Quantization | OpenVINO | Speech-to-Text | -| [Whisper](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/227-whisper-subtitles-generation)
[![Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/openvinotoolkit/openvino_notebooks/blob/main/notebooks/227-whisper-subtitles-generation/227-whisper-convert.ipynb) | Post-Training Quantization | OpenVINO | Speech-to-Text | -| [MMS Speech Recognition](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/255-mms-massively-multilingual-speech) | Post-Training Quantization | OpenVINO | Speech-to-Text | -| [Grammar Error Correction](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/214-grammar-correction) | Post-Training Quantization | OpenVINO | NLP, Grammar Correction | -| [LLM Instruction Following](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/275-llm-question-answering) | Weight Compression | OpenVINO | NLP, Instruction Following | -| [Dolly 2.0](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/240-dolly-2-instruction-following) | Weight Compression | OpenVINO | NLP, Instruction Following | -| [Stable-Zephyr-3b](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/273-stable-zephyr-3b-chatbot) | Weight Compression | OpenVINO | NLP, Chat Bot | -| [LLM Chat Bots](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/254-llm-chatbot) | Weight Compression | OpenVINO | NLP, Chat Bot | +| Demo Model | Compression Algorithm | Backend | Domain | +|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------:|:---------:|:--------------------------------------------------------------------:| +| [YOLOv8](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/yolov8-optimization)
[![Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/openvinotoolkit/openvino_notebooks/blob/latest/notebooks/yolov8-optimization/yolov8-object-detection.ipynb) | Post-Training Quantization | OpenVINO | Object Detection,
KeyPoint Detection,
Instance Segmentation | +| [YOLOv7](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/yolov7-optimization) | Post-Training Quantization | OpenVINO | Object Detection | +| [EfficientSAM](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/efficient-sam) | Post-Training Quantization | OpenVINO | Image Segmentation | +| [Segment Anything Model](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/segment-anything) | Post-Training Quantization | OpenVINO | Image Segmentation | +| [OneFormer](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/oneformer-segmentation) | Post-Training Quantization | OpenVINO | Image Segmentation | +| [InstructPix2Pix](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/instruct-pix2pix-image-editing) | Post-Training Quantization | OpenVINO | Image-to-Image | +| [CLIP](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/clip-zero-shot-image-classification) | Post-Training Quantization | OpenVINO | Image-to-Text | +| [BLIP](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/blip-visual-language-processing) | Post-Training Quantization | OpenVINO | Image-to-Text | +| [Segmind-VegaRT](https://github.com/openvinotoolkit/openvino_notebooks/blob/latest/notebooks/stable-diffusion-xl/segmind-vegart.ipynb) | Post-Training Quantization | OpenVINO | Text-to-Image | +| [Latent Consistency Model](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/latent-consistency-models-image-generation) | Post-Training Quantization | OpenVINO | Text-to-Image | +| [Würstchen](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/wuerstchen-image-generation) | Post-Training Quantization | OpenVINO | Text-to-Image | +| [ControlNet QR Code Monster](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/qrcode-monster) | Post-Training Quantization | OpenVINO | Text-to-Image | +| [SDXL-turbo](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/sdxl-turbo) | Post-Training Quantization | OpenVINO | Text-to-Image,
Image-to-Image | +| [DeepFloyd IF](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/deepfloyd-if) | Post-Training Quantization,
Weight Compression | OpenVINO | Text-to-Image,
Image-to-Image | +| [ImageBind](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/image-bind) | Post-Training Quantization | OpenVINO | Multi-Modal Retrieval | +| [Distil-Whisper](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/distil-whisper-asr) | Post-Training Quantization | OpenVINO | Speech-to-Text | +| [Whisper](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/whisper-subtitles-generation)
[![Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/openvinotoolkit/openvino_notebooks/blob/latest/notebooks/whisper-subtitles-generation/whisper-convert.ipynb) | Post-Training Quantization | OpenVINO | Speech-to-Text | +| [MMS Speech Recognition](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/mms-massively-multilingual-speech) | Post-Training Quantization | OpenVINO | Speech-to-Text | +| [Grammar Error Correction](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/grammar-correction) | Post-Training Quantization | OpenVINO | NLP, Grammar Correction | +| [LLM Instruction Following](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/llm-question-answering) | Weight Compression | OpenVINO | NLP, Instruction Following | +| [Dolly 2.0](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/dolly-2-instruction-following) | Weight Compression | OpenVINO | NLP, Instruction Following | +| [Stable-Zephyr-3b](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/stable-zephyr-3b-chatbot) | Weight Compression | OpenVINO | NLP, Chat Bot | +| [LLM Chat Bots](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/llm-chatbot) | Weight Compression | OpenVINO | NLP, Chat Bot | ### Post-Training Quantization Examples @@ -373,7 +375,8 @@ pip install nncf[torch] Other viable options besides `[torch]` are `[tf]`, `[onnx]` and `[openvino]`. > [!WARNING] -> The way to install the module package with the extra dependency like `pip install nncf[torch]` will be deprecated in a future release. +> The way to install the module package with the extra dependency like `pip install nncf[torch]` is deprecated and will be removed in a future release. +> Instead, it is recommended to install additional dependencies separately using the pip install command (e.g., `pip install torch`) or by explicitly specifying the dependency in your requirements file. NNCF is also available via [conda](https://anaconda.org/conda-forge/nncf): @@ -388,7 +391,7 @@ conda install -c conda-forge nncf - Supported frameworks: - PyTorch\* >=2.1, <2.3 - TensorFlow\* >=2.8.4, <=2.12.1 - - ONNX\* ~=1.13.1 + - ONNX\* ==1.16.0 - OpenVINO\* >=2022.3.0 This repository is tested on Python* 3.8.10, PyTorch* 2.2.1 (NVidia CUDA\* Toolkit 12.1) and TensorFlow* 2.12.1 (NVidia CUDA\* Toolkit 11.8). diff --git a/constraints.txt b/constraints.txt index 24b247bba1e..0d087623191 100644 --- a/constraints.txt +++ b/constraints.txt @@ -19,11 +19,11 @@ pytest-cov==4.1.0 pytest-mock==3.12.0 pytest-dependency==0.6.0 -accelerate==0.24.1 +accelerate==0.28.0 addict==2.4.0 datasets==2.14.7 evaluate==0.3.0 fastdownload==0.0.7 scikit-learn==1.2.2 -transformers==4.36.0 +transformers==4.38.2 ultralytics==8.0.170 diff --git a/docs/Installation.md b/docs/Installation.md index 3fdd4acfe0a..3d0f825ab5a 100644 --- a/docs/Installation.md +++ b/docs/Installation.md @@ -12,14 +12,6 @@ NNCF can be installed as a regular PyPI package via pip: pip install nncf ``` -If you want to install both NNCF and the supported PyTorch version in one line, you can do this by simply running: - -```bash -pip install nncf[torch] -``` - -Other viable options besides `[torch]` are `[tf]`, `[onnx]` and `[openvino]`. 
- ## As a package built from a checked-out repository Install the package and its dependencies by running the following command in the repository root directory: @@ -28,20 +20,6 @@ Install the package and its dependencies by running the following command in the pip install . ``` -Use the same `pip install` syntax as above to install NNCF along with the backend package version in one go: - -```bash -pip install .[] -``` - -List of supported backends: `torch`, `tf`, `onnx` and `openvino`. - -For development purposes install extra packages by - -```bash -pip install .[dev,tests] -``` - _NB_: For launching example scripts in this repository, we recommend setting the `PYTHONPATH` variable to the root of the checked-out repository once the installation is completed. NNCF is also available via [conda](https://anaconda.org/conda-forge/nncf): @@ -65,7 +43,7 @@ as well as the supported versions of Python: | NNCF | OpenVINO | PyTorch | ONNX | TensorFlow | Python | |-----------|------------|----------|----------|------------|--------| -| `develop` | `2024.4.0` | `2.2.1` | `1.13.1` | `2.12.0` | `3.8` | +| `develop` | `2024.4.0` | `2.2.1` | `1.16.0` | `2.12.0` | `3.8` | | `2.9.0` | `2024.4.0` | `2.1.2` | `1.13.1` | `2.12.0` | `3.8` | | `2.8.1` | `2023.3.0` | `2.1.2` | `1.13.1` | `2.12.0` | `3.8` | | `2.8.0` | `2023.3.0` | `2.1.2` | `1.13.1` | `2.12.0` | `3.8` | diff --git a/docs/compression_algorithms/CompressWeights.md b/docs/compression_algorithms/CompressWeights.md index bc53948441b..766ab0fb97a 100644 --- a/docs/compression_algorithms/CompressWeights.md +++ b/docs/compression_algorithms/CompressWeights.md @@ -408,7 +408,7 @@ This modification applies only for patterns `MatMul-Multiply-MatMul` (for exampl List of notebooks demonstrating OpenVINO conversion and inference together with NNCF weight compression for models from various domains: -- [LLM Instruction Following](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/275-llm-question-answering) -- [Dolly 2.0](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/240-dolly-2-instruction-following) -- [Stable-Zephyr-3b](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/273-stable-zephyr-3b-chatbot) -- [LLM Chat Bots](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/254-llm-chatbot) +- [LLM Instruction Following](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/llm-question-answering) +- [Dolly 2.0](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/dolly-2-instruction-following) +- [Stable-Zephyr-3b](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/stable-zephyr-3b-chatbot) +- [LLM Chat Bots](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/llm-chatbot) diff --git a/docs/compression_algorithms/post_training/Quantization.md b/docs/compression_algorithms/post_training/Quantization.md index 2f8a1b20104..f5e3542e2b8 100644 --- a/docs/compression_algorithms/post_training/Quantization.md +++ b/docs/compression_algorithms/post_training/Quantization.md @@ -89,3 +89,5 @@ for data_item in val_loader: NNCF provides the examples of Post-Training Quantization where you can find the implementation of data transformation function: [PyTorch](../../../examples/post_training_quantization/torch/mobilenet_v2/README.md), [TensorFlow](../../../examples/post_training_quantization/tensorflow/mobilenet_v2/README.md), 
[ONNX](../../../examples/post_training_quantization/onnx/mobilenet_v2/README.md), and [OpenVINO](../../../examples/post_training_quantization/openvino/mobilenet_v2/README.md) + +If the Post-Training Quantization algorithm cannot reach your quality requirements, you can fine-tune the quantized PyTorch model. An example of a Quantization-Aware Training pipeline for a PyTorch model can be found [here](../../../examples/quantization_aware_training/torch/resnet18/README.md). diff --git a/docs/styleguide/PyGuide.md b/docs/styleguide/PyGuide.md index 811c8633616..5e5732eaafa 100644 --- a/docs/styleguide/PyGuide.md +++ b/docs/styleguide/PyGuide.md @@ -316,10 +316,10 @@ inline comments. #### 4.2.1 Modules -Every file should contain a license boilerplate. +Every file should contain a license boilerplate, where [YYYY] should be replaced with the current year. ```python -# Copyright (c) 2023 Intel Corporation +# Copyright (c) [YYYY] Intel Corporation # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -803,7 +803,7 @@ def log_current_time(log_stream: LogStream): ```python3 class CheckpointConverter: - # ... + # ... def convert(self, ckpt: CheckpointType) -> AnotherCheckpointType: pass ``` diff --git a/examples/post_training_quantization/onnx/mobilenet_v2/main.py b/examples/post_training_quantization/onnx/mobilenet_v2/main.py index e35a3cc61bd..a846f33726d 100755 --- a/examples/post_training_quantization/onnx/mobilenet_v2/main.py +++ b/examples/post_training_quantization/onnx/mobilenet_v2/main.py @@ -44,7 +44,7 @@ def download_model() -> Path: return download_url(MODEL_URL, Path(MODEL_PATH).resolve()) -def validate(path_to_model: str, validation_loader: torch.utils.data.DataLoader) -> float: +def validate(path_to_model: Path, validation_loader: torch.utils.data.DataLoader) -> float: predictions = [] references = [] @@ -61,7 +61,7 @@ def validate(path_to_model: str, validation_loader: torch.utils.data.DataLoader) return accuracy_score(predictions, references) -def run_benchmark(path_to_model: str, shape: Optional[List[int]] = None, verbose: bool = True) -> float: +def run_benchmark(path_to_model: Path, shape: Optional[List[int]] = None, verbose: bool = True) -> float: command = f"benchmark_app -m {path_to_model} -d CPU -api async -t 15" if shape is not None: command += f' -shape [{",".join(str(x) for x in shape)}]' @@ -79,7 +79,7 @@ def run_benchmark(path_to_model: str, shape: Optional[List[int]] = None, verbose normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) val_dataset = datasets.ImageFolder( - root=f"{dataset_path}/val", + root=dataset_path / "val", transform=transforms.Compose( [ transforms.Resize(256), @@ -127,11 +127,11 @@ def transform_fn(data_item): ############################################################################### # Benchmark performance and validate accuracy -fp32_model_path = f"{ROOT}/mobilenet_v2_fp32.onnx" +fp32_model_path = ROOT / "mobilenet_v2_fp32.onnx" onnx.save(model, fp32_model_path) print(f"[1/7] Save FP32 model: {fp32_model_path}") -int8_model_path = f"{ROOT}/mobilenet_v2_int8.onnx" +int8_model_path = ROOT / "mobilenet_v2_int8.onnx" onnx.save(onnx_quantized_model, int8_model_path) print(f"[2/7] Save INT8 model: {int8_model_path}") diff --git a/examples/post_training_quantization/onnx/yolov8_quantize_with_accuracy_control/README.md 
b/examples/post_training_quantization/onnx/yolov8_quantize_with_accuracy_control/README.md index 906c9ae483f..57e1c028cdd 100644 --- a/examples/post_training_quantization/onnx/yolov8_quantize_with_accuracy_control/README.md +++ b/examples/post_training_quantization/onnx/yolov8_quantize_with_accuracy_control/README.md @@ -44,4 +44,4 @@ python deploy.py ## See also -- [YOLOv8 Jupyter notebook](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/230-yolov8-optimization) +- [YOLOv8 Jupyter notebook](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/yolov8-optimization) diff --git a/examples/post_training_quantization/onnx/yolov8_quantize_with_accuracy_control/main.py b/examples/post_training_quantization/onnx/yolov8_quantize_with_accuracy_control/main.py index 16d7bf53a1f..2109b750b3b 100644 --- a/examples/post_training_quantization/onnx/yolov8_quantize_with_accuracy_control/main.py +++ b/examples/post_training_quantization/onnx/yolov8_quantize_with_accuracy_control/main.py @@ -195,20 +195,17 @@ def validation_ac( preset=nncf.QuantizationPreset.MIXED, ignored_scope=nncf.IgnoredScope( types=["Mul", "Sub", "Sigmoid"], # ignore operations - names=[ - "/model.22/dfl/conv/Conv", # in the post-processing subgraph - "/model.22/Add", - "/model.22/Add_1", - "/model.22/Add_2", - "/model.22/Add_3", - "/model.22/Add_4", - "/model.22/Add_5", - "/model.22/Add_6", - "/model.22/Add_7", - "/model.22/Add_8", - "/model.22/Add_9", - "/model.22/Add_10", - "/model.22/Add_11", + subgraphs=[ + nncf.Subgraph( + inputs=[ + "/model.22/Concat_3", + "/model.22/Concat_6", + "/model.22/Concat_24", + "/model.22/Concat_5", + "/model.22/Concat_4", + ], + outputs=["/model.22/Concat_29"], + ) ], ), ) diff --git a/examples/post_training_quantization/openvino/mobilenet_v2/main.py b/examples/post_training_quantization/openvino/mobilenet_v2/main.py index f2a4a136754..ec625e3ded5 100644 --- a/examples/post_training_quantization/openvino/mobilenet_v2/main.py +++ b/examples/post_training_quantization/openvino/mobilenet_v2/main.py @@ -56,7 +56,7 @@ def validate(model: ov.Model, val_loader: torch.utils.data.DataLoader) -> float: return accuracy_score(predictions, references) -def run_benchmark(model_path: str, shape: Optional[List[int]] = None, verbose: bool = True) -> float: +def run_benchmark(model_path: Path, shape: Optional[List[int]] = None, verbose: bool = True) -> float: command = f"benchmark_app -m {model_path} -d CPU -api async -t 15" if shape is not None: command += f' -shape [{",".join(str(x) for x in shape)}]' @@ -67,7 +67,7 @@ def run_benchmark(model_path: str, shape: Optional[List[int]] = None, verbose: b return float(match.group(1)) -def get_model_size(ir_path: str, m_type: str = "Mb", verbose: bool = True) -> float: +def get_model_size(ir_path: Path, m_type: str = "Mb", verbose: bool = True) -> float: xml_size = os.path.getsize(ir_path) bin_size = os.path.getsize(os.path.splitext(ir_path)[0] + ".bin") for t in ["bytes", "Kb", "Mb"]: @@ -90,7 +90,7 @@ def get_model_size(ir_path: str, m_type: str = "Mb", verbose: bool = True) -> fl normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) val_dataset = datasets.ImageFolder( - root=f"{dataset_path}/val", + root=dataset_path / "val", transform=transforms.Compose( [ transforms.Resize(256), @@ -136,12 +136,12 @@ def transform_fn(data_item): ############################################################################### # Benchmark performance, calculate compression rate and validate accuracy 
-fp32_ir_path = f"{ROOT}/mobilenet_v2_fp32.xml" +fp32_ir_path = ROOT / "mobilenet_v2_fp32.xml" ov.save_model(ov_model, fp32_ir_path, compress_to_fp16=False) print(f"[1/7] Save FP32 model: {fp32_ir_path}") fp32_model_size = get_model_size(fp32_ir_path, verbose=True) -int8_ir_path = f"{ROOT}/mobilenet_v2_int8.xml" +int8_ir_path = ROOT / "mobilenet_v2_int8.xml" ov.save_model(ov_quantized_model, int8_ir_path, compress_to_fp16=False) print(f"[2/7] Save INT8 model: {int8_ir_path}") int8_model_size = get_model_size(int8_ir_path, verbose=True) diff --git a/examples/post_training_quantization/openvino/yolov8/README.md b/examples/post_training_quantization/openvino/yolov8/README.md index f861b3c5b15..78a85325634 100644 --- a/examples/post_training_quantization/openvino/yolov8/README.md +++ b/examples/post_training_quantization/openvino/yolov8/README.md @@ -34,4 +34,4 @@ python main.py ## See also -- [YOLOv8 Jupyter notebook](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/230-yolov8-optimization) +- [YOLOv8 Jupyter notebook](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/yolov8-optimization) diff --git a/examples/post_training_quantization/openvino/yolov8/main.py b/examples/post_training_quantization/openvino/yolov8/main.py index d8513400e4d..5ecdedcb120 100644 --- a/examples/post_training_quantization/openvino/yolov8/main.py +++ b/examples/post_training_quantization/openvino/yolov8/main.py @@ -122,20 +122,12 @@ def transform_fn(data_item: Dict): quantization_dataset, preset=nncf.QuantizationPreset.MIXED, ignored_scope=nncf.IgnoredScope( - types=["Multiply", "Subtract", "Sigmoid"], # ignore operations - names=[ - "/model.22/dfl/conv/Conv", # in the post-processing subgraph - "/model.22/Add", - "/model.22/Add_1", - "/model.22/Add_2", - "/model.22/Add_3", - "/model.22/Add_4", - "/model.22/Add_5", - "/model.22/Add_6", - "/model.22/Add_7", - "/model.22/Add_8", - "/model.22/Add_9", - "/model.22/Add_10", + types=["Multiply", "Subtract", "Sigmoid"], + subgraphs=[ + nncf.Subgraph( + inputs=["/model.22/Concat", "/model.22/Concat_1", "/model.22/Concat_2"], + outputs=["output0/sink_port_0"], + ) ], ), ) diff --git a/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/README.md b/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/README.md index f5649efbdf8..5dcc75450a9 100644 --- a/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/README.md +++ b/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/README.md @@ -36,4 +36,4 @@ python main.py ## See also -- [YOLOv8 Jupyter notebook](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/230-yolov8-optimization) +- [YOLOv8 Jupyter notebook](https://github.com/openvinotoolkit/openvino_notebooks/tree/latest/notebooks/yolov8-optimization) diff --git a/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/main.py b/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/main.py index 79722754ced..255fe9fe5f9 100644 --- a/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/main.py +++ b/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/main.py @@ -186,20 +186,17 @@ def validation_ac( preset=nncf.QuantizationPreset.MIXED, ignored_scope=nncf.IgnoredScope( types=["Multiply", "Subtract", "Sigmoid"], # ignore operations - names=[ - "/model.22/dfl/conv/Conv", 
# in the post-processing subgraph - "/model.22/Add", - "/model.22/Add_1", - "/model.22/Add_2", - "/model.22/Add_3", - "/model.22/Add_4", - "/model.22/Add_5", - "/model.22/Add_6", - "/model.22/Add_7", - "/model.22/Add_8", - "/model.22/Add_9", - "/model.22/Add_10", - "/model.22/Add_11", + subgraphs=[ + nncf.Subgraph( + inputs=[ + "/model.22/Concat_3", + "/model.22/Concat_6", + "/model.22/Concat_24", + "/model.22/Concat_5", + "/model.22/Concat_4", + ], + outputs=["output0"], + ) ], ), ) diff --git a/examples/post_training_quantization/torch/mobilenet_v2/main.py b/examples/post_training_quantization/torch/mobilenet_v2/main.py index b053ad43499..a78452be87a 100644 --- a/examples/post_training_quantization/torch/mobilenet_v2/main.py +++ b/examples/post_training_quantization/torch/mobilenet_v2/main.py @@ -63,7 +63,7 @@ def validate(model: ov.Model, val_loader: torch.utils.data.DataLoader) -> float: return accuracy_score(predictions, references) -def run_benchmark(model_path: str, shape: Optional[List[int]] = None, verbose: bool = True) -> float: +def run_benchmark(model_path: Path, shape: Optional[List[int]] = None, verbose: bool = True) -> float: command = f"benchmark_app -m {model_path} -d CPU -api async -t 15" if shape is not None: command += f' -shape [{",".join(str(x) for x in shape)}]' @@ -74,7 +74,7 @@ def run_benchmark(model_path: str, shape: Optional[List[int]] = None, verbose: b return float(match.group(1)) -def get_model_size(ir_path: str, m_type: str = "Mb", verbose: bool = True) -> float: +def get_model_size(ir_path: Path, m_type: str = "Mb", verbose: bool = True) -> float: xml_size = os.path.getsize(ir_path) bin_size = os.path.getsize(os.path.splitext(ir_path)[0] + ".bin") for t in ["bytes", "Kb", "Mb"]: @@ -97,7 +97,7 @@ def get_model_size(ir_path: str, m_type: str = "Mb", verbose: bool = True) -> fl normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) val_dataset = datasets.ImageFolder( - root=f"{dataset_path}/val", + root=dataset_path / "val", transform=transforms.Compose( [ transforms.Resize(256), @@ -107,7 +107,8 @@ def get_model_size(ir_path: str, m_type: str = "Mb", verbose: bool = True) -> fl ] ), ) -val_data_loader = torch.utils.data.DataLoader(val_dataset) +batch_size = 128 +val_data_loader = torch.utils.data.DataLoader(val_dataset, batch_size=batch_size) torch_model = models.mobilenet_v2(num_classes=DATASET_CLASSES) torch_model = load_checkpoint(torch_model) @@ -140,8 +141,10 @@ def transform_fn(data_item: Tuple[torch.Tensor, int], device: torch.device) -> t # item and prepare model input data. The quantize method uses a small subset # (default: 300 samples) of the calibration dataset. +# Recalculation default subset_size parameter based on batch_size. 
+subset_size = 300 // batch_size calibration_dataset = nncf.Dataset(val_data_loader, partial(transform_fn, device=device)) -torch_quantized_model = nncf.quantize(torch_model, calibration_dataset) +torch_quantized_model = nncf.quantize(torch_model, calibration_dataset, subset_size=subset_size) ############################################################################### # Benchmark performance, calculate compression rate and validate accuracy @@ -150,12 +153,12 @@ def transform_fn(data_item: Tuple[torch.Tensor, int], device: torch.device) -> t ov_model = ov.convert_model(torch_model.cpu(), example_input=dummy_input) ov_quantized_model = ov.convert_model(torch_quantized_model.cpu(), example_input=dummy_input) -fp32_ir_path = f"{ROOT}/mobilenet_v2_fp32.xml" +fp32_ir_path = ROOT / "mobilenet_v2_fp32.xml" ov.save_model(ov_model, fp32_ir_path, compress_to_fp16=False) print(f"[1/7] Save FP32 model: {fp32_ir_path}") fp32_model_size = get_model_size(fp32_ir_path, verbose=True) -int8_ir_path = f"{ROOT}/mobilenet_v2_int8.xml" +int8_ir_path = ROOT / "mobilenet_v2_int8.xml" ov.save_model(ov_quantized_model, int8_ir_path, compress_to_fp16=False) print(f"[2/7] Save INT8 model: {int8_ir_path}") int8_model_size = get_model_size(int8_ir_path, verbose=True) diff --git a/examples/post_training_quantization/torch/ssd300_vgg16/main.py b/examples/post_training_quantization/torch/ssd300_vgg16/main.py index fc359e750b3..1b586f4a995 100644 --- a/examples/post_training_quantization/torch/ssd300_vgg16/main.py +++ b/examples/post_training_quantization/torch/ssd300_vgg16/main.py @@ -70,9 +70,9 @@ def run_benchmark(model_path: str, shape=None, verbose: bool = True) -> float: class COCO128Dataset(torch.utils.data.Dataset): category_mapping = [ - 1,2,3,4,5,6,7,8,9,10,11,13,14,15,16,17,18,19,20,21,22,23,24,25,27,28,31,32,33, - 34,35,36,37,38,39,40,41,42,43,44,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60, - 61,62,63,64,65,67,70,72,73,74,75,76,77,78,79,80,81,82,84,85,86,87,88,89,90 + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 28, 31, 32, 33, + 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, + 61, 62, 63, 64, 65, 67, 70, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 84, 85, 86, 87, 88, 89, 90 ] # fmt: skip def __init__(self, data_path: str, transform: Callable): diff --git a/examples/torch/common/models/classification/mobilenet_v3_tv_092.py b/examples/torch/common/models/classification/mobilenet_v3_tv_092.py index 5ce32170bd2..1ba5f4d5cb3 100644 --- a/examples/torch/common/models/classification/mobilenet_v3_tv_092.py +++ b/examples/torch/common/models/classification/mobilenet_v3_tv_092.py @@ -305,7 +305,7 @@ def _mobilenet_v3_model( ): model = MobileNetV3(inverted_residual_setting, last_channel, **kwargs) if pretrained: - if model_urls.get(arch, None) is None: + if model_urls.get(arch) is None: raise ValueError("No checkpoint is available for model type {}".format(arch)) state_dict = load_state_dict_from_url(model_urls[arch], progress=progress) model.load_state_dict(state_dict) diff --git a/nncf/__init__.py b/nncf/__init__.py index eaaa755a49e..9af922c21a5 100644 --- a/nncf/__init__.py +++ b/nncf/__init__.py @@ -45,7 +45,10 @@ from nncf.quantization.advanced_parameters import ( AdvancedAccuracyRestorerParameters as AdvancedAccuracyRestorerParameters, ) +from nncf.quantization.advanced_parameters import AdvancedBiasCorrectionParameters as AdvancedBiasCorrectionParameters from nncf.quantization.advanced_parameters import 
AdvancedQuantizationParameters as AdvancedQuantizationParameters +from nncf.quantization.advanced_parameters import AdvancedSmoothQuantParameters as AdvancedSmoothQuantParameters +from nncf.quantization.advanced_parameters import OverflowFix as OverflowFix from nncf.scopes import IgnoredScope as IgnoredScope from nncf.scopes import Subgraph as Subgraph from nncf.version import __version__ as __version__ diff --git a/nncf/common/graph/graph.py b/nncf/common/graph/graph.py index 1f9795fc721..13ea932d921 100644 --- a/nncf/common/graph/graph.py +++ b/nncf/common/graph/graph.py @@ -8,12 +8,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import pathlib from collections import defaultdict from copy import deepcopy -from typing import Any, Callable, Dict, Generator, KeysView, List, Optional, Tuple, Type, ValuesView +from typing import Any, Callable, Dict, Generator, KeysView, List, Optional, Tuple, Type, ValuesView, cast -import networkx as nx -import networkx.algorithms.isomorphism as iso +import networkx as nx # type:ignore +import networkx.algorithms.isomorphism as iso # type:ignore +from networkx.classes.reportviews import OutEdgeView # type:ignore import nncf from nncf.common.graph.graph_matching import find_subgraphs_matching_pattern @@ -46,7 +48,7 @@ class NNCFNode: IS_INTEGER_INPUT_NODE_ATTR = "is_integer_input" IS_SHARED_ATTR = "is_shared" - def __init__(self, attributes: Dict[str, Any]): + def __init__(self, attributes: Dict[str, Any]) -> None: self._attributes = attributes @property @@ -55,23 +57,23 @@ def attributes(self) -> Dict[str, Any]: @property def node_id(self) -> int: - return self._attributes[NNCFNode.ID_NODE_ATTR] + return cast(int, self._attributes[NNCFNode.ID_NODE_ATTR]) @property def node_key(self) -> str: - return self._attributes[NNCFNode.KEY_NODE_ATTR] + return cast(str, self._attributes[NNCFNode.KEY_NODE_ATTR]) @property def node_name(self) -> NNCFNodeName: - return self._attributes[NNCFNode.NODE_NAME_ATTR] + return cast(NNCFNodeName, self._attributes[NNCFNode.NODE_NAME_ATTR]) @property def metatype(self) -> Type[OperatorMetatype]: - return self._attributes[NNCFNode.METATYPE_ATTR] + return cast(Type[OperatorMetatype], self._attributes[NNCFNode.METATYPE_ATTR]) @property def node_type(self) -> str: - return self._attributes[NNCFNode.NODE_TYPE_ATTR] + return cast(str, self._attributes[NNCFNode.NODE_TYPE_ATTR]) @property def layer_name(self) -> Optional[LayerName]: @@ -91,27 +93,27 @@ def layer_attributes(self, value: BaseLayerAttributes) -> None: @property def ignored_algorithms(self) -> List[str]: - return self._attributes[NNCFNode.IGNORED_ALGOS_ATTR] + return cast(List[str], self._attributes[NNCFNode.IGNORED_ALGOS_ATTR]) def is_in_iteration_scope(self) -> bool: - return self._attributes[NNCFNode.IS_IN_ITERATION_SCOPE_NODE_ATTR] + return cast(bool, self._attributes[NNCFNode.IS_IN_ITERATION_SCOPE_NODE_ATTR]) def is_integer_input(self) -> bool: - return self._attributes[NNCFNode.IS_INTEGER_INPUT_NODE_ATTR] + return cast(bool, self._attributes[NNCFNode.IS_INTEGER_INPUT_NODE_ATTR]) def is_shared(self) -> bool: - return self._attributes[NNCFNode.IS_SHARED_ATTR] + return cast(bool, self._attributes[NNCFNode.IS_SHARED_ATTR]) - def __repr__(self): + def __repr__(self) -> str: return str(self) - def __str__(self): + def __str__(self) -> str: return " ".join([str(self.node_id), self.node_name, self.node_type]) - def __hash__(self): + def 
__hash__(self) -> int: return hash(str(self)) - def __eq__(self, other): + def __eq__(self, other: object) -> bool: return isinstance(other, NNCFNode) and self.attributes == other.attributes @@ -131,7 +133,7 @@ def __init__( tensor_shape: List[int], dtype: Dtype, parallel_input_port_ids: List[int], - ): + ) -> None: """ :param from_node: An NNCFNode that sources the directed edge. :param to_node: An NNCFNode that sinks the directed edge. @@ -144,14 +146,14 @@ def __init__( self.to_node = to_node self.input_port_id = input_port_id self.output_port_id = output_port_id - self.tensor_shape: Tuple[int] = tuple(tensor_shape) + self.tensor_shape: Tuple[int, ...] = tuple(tensor_shape) self.dtype = dtype self.parallel_input_port_ids = parallel_input_port_ids - def __str__(self): + def __str__(self) -> str: return f"{self.from_node}:{self.output_port_id} -> {self.tensor_shape} -> {self.to_node}:{self.input_port_id}" - def __hash__(self): + def __hash__(self) -> int: return hash( ( self.from_node, @@ -164,7 +166,7 @@ def __hash__(self): ) ) - def __eq__(self, other): + def __eq__(self, other: object) -> bool: return isinstance(other, NNCFGraphEdge) and self.__dict__ == other.__dict__ @@ -190,9 +192,9 @@ class NNCFGraph: DTYPE_EDGE_ATTR = "dtype" PARALLEL_INPUT_PORT_IDS_ATTR = "parallel_input_ports" - def __init__(self): + def __init__(self) -> None: self._nx_graph = nx.DiGraph() - self._node_id_to_key_dict = {} + self._node_id_to_key_dict: Dict[int, str] = {} self._nodes: Dict[str, NNCFNode] = {} self._input_nncf_nodes: Dict[int, NNCFNode] = {} self._output_nncf_nodes: Dict[int, NNCFNode] = {} @@ -288,18 +290,20 @@ def get_all_simple_paths( end_node = self.get_node_by_name(end_node_name) start_node_key = self.get_node_key_by_id(start_node.node_id) end_node_key = self.get_node_key_by_id(end_node.node_id) - return nx.all_simple_paths(self._nx_graph, start_node_key, end_node_key) + return cast( + Generator[List[NNCFNodeName], None, None], nx.all_simple_paths(self._nx_graph, start_node_key, end_node_key) + ) @staticmethod def _get_edge_boundaries( match: List[str], graph: nx.DiGraph - ) -> Tuple[List[Tuple[str, str]], List[Tuple[str, str]]]: + ) -> Tuple[List[Tuple[str, str, Dict[str, Any]]], List[Tuple[str, str, Dict[str, Any]]]]: out_edge_boundary = list(nx.edge_boundary(graph, match, data=True)) complement = list(filter(lambda x: x not in match, graph.nodes.keys())) in_edge_boundary = list(nx.edge_boundary(graph, complement, data=True)) return sorted(in_edge_boundary), sorted(out_edge_boundary) # must be sorted for determinism - def get_node_key_by_id(self, node_id: id) -> str: + def get_node_key_by_id(self, node_id: int) -> str: """ Returns node key (node_name) by provided id. @@ -369,7 +373,7 @@ def _get_edges(self, from_node: NNCFNode, to_node: NNCFNode) -> List[NNCFGraphEd to_node=edge.to_node, input_port_id=input_port_id, output_port_id=edge.output_port_id, - tensor_shape=edge.tensor_shape, + tensor_shape=list(edge.tensor_shape), dtype=edge.dtype, parallel_input_port_ids=[], ) @@ -381,7 +385,7 @@ def traverse_graph( curr_node: NNCFNode, traverse_function: Callable[[NNCFNode, List[Any]], Tuple[bool, List[Any]]], traverse_forward: bool = True, - ): + ) -> List[Any]: """ Traverses graph up or down starting form `curr_node` node. @@ -390,7 +394,7 @@ def traverse_graph( :param traverse_forward: Flag specifying direction of traversal. 
:return: """ - output = [] + output: List[Any] = [] return self._traverse_graph_recursive_helper(curr_node, traverse_function, output, traverse_forward) def _traverse_graph_recursive_helper( @@ -399,7 +403,7 @@ def _traverse_graph_recursive_helper( traverse_function: Callable[[NNCFNode, List[Any]], Tuple[bool, List[Any]]], output: List[Any], traverse_forward: bool, - ): + ) -> List[Any]: is_finished, output = traverse_function(curr_node, output) get_nodes_fn = self.get_next_nodes if traverse_forward else self.get_previous_nodes if not is_finished: @@ -450,7 +454,7 @@ def add_nncf_node( if node_id_override is not None: node_id = node_id_override else: - node_ids = self.get_all_node_ids() + node_ids = list(self.get_all_node_ids()) if node_ids: node_id = max(self.get_all_node_ids()) + 1 else: @@ -508,7 +512,7 @@ def add_edge_between_nncf_nodes( output_port_id: int, dtype: Dtype, parallel_input_port_ids: Optional[List[int]] = None, - ): + ) -> None: """ Adds a directed edge between two `NNCFNode`s that are already present in the graph. The edge represents an activation tensor, produced or consumed by an operation (which is represented by a node) @@ -559,12 +563,12 @@ def topological_sort(self) -> List[NNCFNode]: ) ] - def dump_graph(self, path: str): - write_dot_graph(self.get_graph_for_structure_analysis(), path) + def dump_graph(self, path: str) -> None: + write_dot_graph(self.get_graph_for_structure_analysis(), pathlib.Path(path)) - def visualize_graph(self, path: str): + def visualize_graph(self, path: str) -> None: out_graph = self._get_graph_for_visualization() - write_dot_graph(out_graph, path) + write_dot_graph(out_graph, pathlib.Path(path)) def get_graph_for_structure_analysis(self, extended: bool = False) -> nx.DiGraph: """ @@ -633,7 +637,7 @@ def _get_graph_for_visualization(self) -> nx.DiGraph: mapping = {k: v["label"] for k, v in out_graph.nodes.items()} out_graph = nx.relabel_nodes(out_graph, mapping) for node in out_graph.nodes.values(): - node.pop("label") + node.pop("label") # type: ignore return out_graph @@ -647,14 +651,16 @@ def get_node_by_name(self, name: NNCFNodeName) -> NNCFNode: node_key = f"{node_ids[0]} {name}" return self._nodes[node_key] - def __eq__(self, other: "NNCFGraph"): + def __eq__(self, other: object) -> bool: nm = iso.categorical_node_match( [NNCFNode.ID_NODE_ATTR, NNCFNode.KEY_NODE_ATTR, NNCFNode.LAYER_ATTRIBUTES], [None, None, None] ) em = iso.categorical_edge_match( [NNCFGraph.ACTIVATION_SHAPE_EDGE_ATTR, NNCFGraph.INPUT_PORT_ID_EDGE_ATTR], [None, None] ) - return nx.is_isomorphic(self._nx_graph, other._nx_graph, node_match=nm, edge_match=em) + return isinstance(other, NNCFGraph) and bool( + nx.is_isomorphic(self._nx_graph, other._nx_graph, node_match=nm, edge_match=em) + ) def get_nx_graph_copy(self) -> nx.DiGraph: return deepcopy(self._nx_graph) @@ -697,13 +703,13 @@ def get_nncf_graph_pattern_io(self, match: List[str]) -> NNCFGraphPatternIO: return NNCFGraphPatternIO(input_nncf_edges, output_nncf_edges) - def get_nx_edge(self, node_u: NNCFNode, node_v: NNCFNode): + def get_nx_edge(self, node_u: NNCFNode, node_v: NNCFNode) -> OutEdgeView: nx_node_u = self._nx_graph.nodes[self._node_id_to_key_dict[node_u.node_id]] nx_node_v = self._nx_graph.nodes[self._node_id_to_key_dict[node_v.node_id]] return self._nx_graph.edges[nx_node_u["key"], nx_node_v["key"]] - def get_nodes_count(self): - return self._nx_graph.number_of_nodes() + def get_nodes_count(self) -> int: + return int(self._nx_graph.number_of_nodes()) def get_edge(self, from_node: NNCFNode, to_node: 
NNCFNode) -> NNCFGraphEdge: """ @@ -741,7 +747,7 @@ def remove_nodes_from(self, nodes: List[NNCFNode]) -> None: self._node_id_to_key_dict = {} for node_key, node in self._nx_graph.nodes.items(): - self._node_id_to_key_dict[node["id"]] = node_key + self._node_id_to_key_dict[node["id"]] = node_key # type:ignore def find_matching_subgraphs(self, patterns: GraphPattern, strict: bool = True) -> List[List[NNCFNode]]: """ diff --git a/nncf/common/graph/graph_matching.py b/nncf/common/graph/graph_matching.py index a4af34edf43..d2bc0886ffe 100644 --- a/nncf/common/graph/graph_matching.py +++ b/nncf/common/graph/graph_matching.py @@ -10,15 +10,15 @@ # limitations under the License. from typing import Dict, List -import networkx as nx -import networkx.algorithms.isomorphism as ism +import networkx as nx # type:ignore +import networkx.algorithms.isomorphism as ism # type:ignore from nncf.common.graph.patterns import GraphPattern ATTRS_TO_SKIP = [GraphPattern.LABEL_ATTR, GraphPattern.PATTERN_NODE_TO_EXCLUDE] -def _are_nodes_matched(node_1, node_2) -> bool: +def _are_nodes_matched(node_1, node_2) -> bool: # type:ignore for attr in node_2: if attr in ATTRS_TO_SKIP: continue diff --git a/nncf/common/graph/layer_attributes.py b/nncf/common/graph/layer_attributes.py index 06da97313b8..bf45dae94f6 100644 --- a/nncf/common/graph/layer_attributes.py +++ b/nncf/common/graph/layer_attributes.py @@ -43,7 +43,7 @@ def __init__(self, axis: int, num_inputs: Optional[int] = None): class MultipleOutputLayerAttributes(BaseLayerAttributes): - def __init__(self, chunks: Union[int, List], axis: int): + def __init__(self, chunks: Union[int, List[Any]], axis: int): """ :param chunks: Number of chunks (outputs). diff --git a/nncf/common/graph/model_transformer.py b/nncf/common/graph/model_transformer.py index 18874405efd..c63ba078cd3 100644 --- a/nncf/common/graph/model_transformer.py +++ b/nncf/common/graph/model_transformer.py @@ -29,7 +29,7 @@ def __init__(self, model: TModel): """ self._model = model - def transform(self, transformation_layout: TransformationLayout) -> TModel: + def transform(self, transformation_layout: TransformationLayout) -> TModel: # type:ignore """ Applies transformations to the model. diff --git a/nncf/common/graph/operator_metatypes.py b/nncf/common/graph/operator_metatypes.py index e65f659471f..100f510428a 100644 --- a/nncf/common/graph/operator_metatypes.py +++ b/nncf/common/graph/operator_metatypes.py @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import List, Optional, Set, Type +from typing import Callable, Dict, List, Optional, Set, Type import nncf from nncf.common.graph.definitions import NNCFGraphNodeType @@ -77,19 +77,20 @@ def __init__(self, name: str): :param name: The registry name. """ super().__init__(name) - self._op_name_to_op_meta_dict = {} + self._op_name_to_op_meta_dict: Dict[str, Type[OperatorMetatype]] = {} - def register(self, name: Optional[str] = None): + def register(self, name: Optional[str] = None, is_subtype: bool = False) -> Callable[..., Type[OperatorMetatype]]: """ Decorator for registering operator metatypes. :param name: The registration name. + :param is_subtype: Whether the decorated metatype is a subtype of another registered operator. :return: The inner function for registering operator metatypes. 
""" name_ = name super_register = super()._register - def wrap(obj: Type[OperatorMetatype]): + def wrap(obj: Type[OperatorMetatype]) -> Type[OperatorMetatype]: """ Inner function for registering operator metatypes. @@ -100,15 +101,15 @@ def wrap(obj: Type[OperatorMetatype]): if cls_name is None: cls_name = obj.__name__ super_register(obj, cls_name) - op_names = obj.get_all_aliases() - for name in op_names: - if name in self._op_name_to_op_meta_dict and not obj.subtype_check(self._op_name_to_op_meta_dict[name]): - raise nncf.InternalError( - "Inconsistent operator metatype registry - single patched " - "op name maps to multiple metatypes!" - ) - - self._op_name_to_op_meta_dict[name] = obj + if not is_subtype: + op_names = obj.get_all_aliases() + for name in op_names: + if name in self._op_name_to_op_meta_dict: + raise nncf.InternalError( + "Inconsistent operator metatype registry - single patched " + f"op name `{name}` maps to multiple metatypes!" + ) + self._op_name_to_op_meta_dict[name] = obj return obj return wrap diff --git a/nncf/common/graph/patterns/manager.py b/nncf/common/graph/patterns/manager.py index 0139ae12e50..824a3b4a4c5 100644 --- a/nncf/common/graph/patterns/manager.py +++ b/nncf/common/graph/patterns/manager.py @@ -8,7 +8,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Callable, Dict, Optional, Union +from typing import Callable, Dict, Optional, Union, cast from nncf.common.graph.patterns.patterns import GraphPattern from nncf.common.graph.patterns.patterns import HWFusedPatternNames @@ -34,18 +34,22 @@ def _get_backend_hw_patterns_map(backend: BackendType) -> Dict[HWFusedPatternNam :param backend: BackendType instance. :return: Dictionary with the HWFusedPatternNames instance as keys and creator function as a value. """ + registry: Dict[HWFusedPatternNames, Callable[[], GraphPattern]] = {} if backend == BackendType.ONNX: from nncf.onnx.hardware.fused_patterns import ONNX_HW_FUSED_PATTERNS - return ONNX_HW_FUSED_PATTERNS.registry_dict + registry = ONNX_HW_FUSED_PATTERNS.registry_dict + return registry if backend == BackendType.OPENVINO: from nncf.openvino.hardware.fused_patterns import OPENVINO_HW_FUSED_PATTERNS - return OPENVINO_HW_FUSED_PATTERNS.registry_dict + registry = OPENVINO_HW_FUSED_PATTERNS.registry_dict + return registry if backend == BackendType.TORCH: from nncf.torch.hardware.fused_patterns import PT_HW_FUSED_PATTERNS - return PT_HW_FUSED_PATTERNS.registry_dict + registry = PT_HW_FUSED_PATTERNS.registry_dict + return registry raise ValueError(f"Hardware-fused patterns not implemented for {backend} backend.") @staticmethod @@ -58,23 +62,29 @@ def _get_backend_ignored_patterns_map( :param backend: BackendType instance. :return: Dictionary with the HWFusedPatternNames instance as keys and creator function as a value. 
""" + registry: Dict[IgnoredPatternNames, Callable[[], GraphPattern]] = {} if backend == BackendType.ONNX: from nncf.onnx.quantization.ignored_patterns import ONNX_IGNORED_PATTERNS - return ONNX_IGNORED_PATTERNS.registry_dict + registry = ONNX_IGNORED_PATTERNS.registry_dict + return registry if backend == BackendType.OPENVINO: from nncf.openvino.quantization.ignored_patterns import OPENVINO_IGNORED_PATTERNS - return OPENVINO_IGNORED_PATTERNS.registry_dict + registry = OPENVINO_IGNORED_PATTERNS.registry_dict + return registry if backend == BackendType.TORCH: from nncf.torch.quantization.ignored_patterns import PT_IGNORED_PATTERNS - return PT_IGNORED_PATTERNS.registry_dict + registry = PT_IGNORED_PATTERNS.registry_dict + return registry raise ValueError(f"Ignored patterns not implemented for {backend} backend.") @staticmethod def _filter_patterns( - patterns_to_filter: Dict[PatternNames, Callable[[], GraphPattern]], device: TargetDevice, model_type: ModelType + patterns_to_filter: Dict[PatternNames, Callable[[], GraphPattern]], + device: TargetDevice, + model_type: Optional[ModelType] = None, ) -> Dict[PatternNames, Callable[[], GraphPattern]]: """ Returns all patterns from patterns_to_filter that are satisfied device and model_type parameters. @@ -98,7 +108,7 @@ def _filter_patterns( def _get_full_pattern_graph( backend_patterns_map: Dict[PatternNames, Callable[[], GraphPattern]], device: TargetDevice, - model_type: ModelType, + model_type: Optional[ModelType] = None, ) -> GraphPattern: """ Filters patterns and returns GraphPattern with registered filtered patterns. @@ -127,7 +137,9 @@ def get_full_hw_pattern_graph( :param model_type: ModelType instance. :return: Completed GraphPattern based on the backend, device & model_type. """ - backend_patterns_map = PatternsManager._get_backend_hw_patterns_map(backend) + backend_patterns_map = cast( + Dict[PatternNames, Callable[[], GraphPattern]], PatternsManager._get_backend_hw_patterns_map(backend) + ) return PatternsManager._get_full_pattern_graph(backend_patterns_map, device, model_type) @staticmethod @@ -143,5 +155,7 @@ def get_full_ignored_pattern_graph( :param model_type: ModelType instance. :return: Completed GraphPattern with registered value based on the backend, device & model_type. """ - backend_patterns_map = PatternsManager._get_backend_ignored_patterns_map(backend) + backend_patterns_map = cast( + Dict[PatternNames, Callable[[], GraphPattern]], PatternsManager._get_backend_ignored_patterns_map(backend) + ) return PatternsManager._get_full_pattern_graph(backend_patterns_map, device, model_type) diff --git a/nncf/common/graph/patterns/patterns.py b/nncf/common/graph/patterns/patterns.py index 98da197dda7..89f7f2cd749 100644 --- a/nncf/common/graph/patterns/patterns.py +++ b/nncf/common/graph/patterns/patterns.py @@ -11,12 +11,13 @@ import copy import itertools as it import os +import pathlib from dataclasses import dataclass from enum import Enum -from typing import Dict, Hashable, List, Optional, Tuple +from typing import Any, Callable, Dict, Hashable, List, Optional, Tuple, cast -import networkx as nx -import networkx.algorithms.isomorphism as ism +import networkx as nx # type: ignore +import networkx.algorithms.isomorphism as ism # type: ignore import nncf from nncf.common.utils.dot_file_rw import write_dot_graph @@ -32,8 +33,8 @@ class Patterns: during the quantization algorithm. 
""" - def __init__(self): - self._patterns_dict = {} + def __init__(self) -> None: + self._patterns_dict: Dict[str, GraphPattern] = {} self._full_pattern_graph = GraphPattern() def register(self, pattern: "GraphPattern", name: str, match: bool = True) -> None: @@ -83,7 +84,7 @@ class GraphPattern: NON_PATTERN_NODE_TYPE = "NON_PATTERN_NODE" PATTERN_NODE_TO_EXCLUDE = "PATTERN_NODE_TO_EXCLUDE" - def __init__(self): + def __init__(self) -> None: self._graph = nx.DiGraph() self._node_counter = 0 @@ -130,8 +131,9 @@ def __or__(self, other: "GraphPattern") -> "GraphPattern": new_pattern._unite_with_copy_of_graph(other.graph) return new_pattern - def __eq__(self, other: "GraphPattern") -> bool: - return ism.is_isomorphic(self._graph, other.graph) + def __eq__(self, other: object) -> bool: + is_isomorphic: Callable[[Any, Any], bool] = ism.is_isomorphic + return isinstance(other, GraphPattern) and is_isomorphic(self._graph, other.graph) @property def graph(self) -> nx.DiGraph: @@ -232,27 +234,27 @@ def join_patterns(self, other: "GraphPattern", edges: Optional[List[Tuple[Hashab remapped_edges.append(new_edge) self._graph.add_edges_from(remapped_edges) - def add_node(self, **attrs) -> int: + def add_node(self, **attrs: Dict[str, Any]) -> int: if GraphPattern.METATYPE_ATTR in attrs and not isinstance(attrs[GraphPattern.METATYPE_ATTR], list): - attrs[GraphPattern.METATYPE_ATTR] = [attrs[GraphPattern.METATYPE_ATTR]] + attrs[GraphPattern.METATYPE_ATTR] = cast(Any, [attrs[GraphPattern.METATYPE_ATTR]]) self._graph.add_node(self._node_counter, **attrs) self._node_counter += 1 return self._node_counter - 1 - def add_edge(self, u_name, v_name) -> None: + def add_edge(self, u_name: str, v_name: str) -> None: self._graph.add_edge(u_name, v_name) - def add_edges_from(self, ebunch_to_add, **attr) -> None: + def add_edges_from(self, ebunch_to_add: List[Any], **attr: Dict[str, Any]) -> None: self._graph.add_edges_from(ebunch_to_add, **attr) def get_weakly_connected_subgraphs(self) -> List[nx.DiGraph]: return [self._graph.subgraph(c) for c in nx.weakly_connected_components(self._graph)] def dump_graph(self, path: str) -> None: - write_dot_graph(self._graph, path) + write_dot_graph(self._graph, pathlib.Path(path)) -def merge_two_types_of_operations(first_op: Dict, second_op: Dict, label: str) -> Dict: +def merge_two_types_of_operations(first_op: Dict[str, Any], second_op: Dict[str, Any], label: str) -> Dict[str, Any]: if GraphPattern.METATYPE_ATTR in first_op and GraphPattern.METATYPE_ATTR in second_op: res = {GraphPattern.METATYPE_ATTR: first_op[GraphPattern.METATYPE_ATTR]} res[GraphPattern.METATYPE_ATTR].extend(second_op[GraphPattern.METATYPE_ATTR]) @@ -277,7 +279,7 @@ class PatternDesc: name: str devices: Optional[List[TargetDevice]] = None - model_types: Optional[List[TargetDevice]] = None + model_types: Optional[List[ModelType]] = None class HWFusedPatternNames(Enum): diff --git a/nncf/common/graph/transformations/commands.py b/nncf/common/graph/transformations/commands.py index c03b9987e7c..7128f2793c0 100644 --- a/nncf/common/graph/transformations/commands.py +++ b/nncf/common/graph/transformations/commands.py @@ -163,7 +163,7 @@ def get_state(self) -> Dict[str, Any]: """ return {self._state_names.TARGET_TYPE: self._target_type.get_state()} - def is_weight_target_point(self): + def is_weight_target_point(self) -> bool: return self._target_type == TargetType.OPERATION_WITH_WEIGHTS @classmethod diff --git a/nncf/common/graph/transformations/layout.py b/nncf/common/graph/transformations/layout.py index 
2698315075b..c6e9334af6a 100644 --- a/nncf/common/graph/transformations/layout.py +++ b/nncf/common/graph/transformations/layout.py @@ -23,11 +23,11 @@ class TransformationLayout: addresses these issues. """ - def __init__(self): + def __init__(self) -> None: """ Initialize Transformation Layout. """ - self._transformations = [] + self._transformations: List[TransformationCommand] = [] @property def transformations(self) -> List[TransformationCommand]: diff --git a/nncf/common/graph/utils.py b/nncf/common/graph/utils.py index a49bcf031a0..b21c3b013da 100644 --- a/nncf/common/graph/utils.py +++ b/nncf/common/graph/utils.py @@ -10,7 +10,7 @@ # limitations under the License. from functools import partial -from typing import List, Set +from typing import List, Set, Tuple, Type, Union from nncf.common.graph import NNCFGraph from nncf.common.graph import NNCFNode @@ -89,7 +89,9 @@ def get_split_axis(input_shapes: List[List[int]], output_shapes: List[List[int]] def get_number_of_quantized_ops( - graph: NNCFGraph, quantizer_metatypes: List[OperatorMetatype], quantizable_metatypes: List[OperatorMetatype] + graph: NNCFGraph, + quantizer_metatypes: List[Type[OperatorMetatype]], + quantizable_metatypes: List[Type[OperatorMetatype]], ) -> int: """ Returns the number of quantized operations in the graph. @@ -114,3 +116,19 @@ def get_number_of_quantized_ops( else: nodes_to_see.extend(graph.get_next_nodes(node)) return len(quantized_ops) + + +def get_reduction_axes( + channel_axes: Union[List[int], Tuple[int, ...]], shape: Union[List[int], Tuple[int, ...]] +) -> Tuple[int, ...]: + """ + Returns filtered reduction axes without axes that correspond to channels. + + :param channel_axes: Channel axes. + :param shape: Shape that need to be filtered. + :return: Reduction axes. + """ + reduction_axes = list(range(len(shape))) + for channel_axis in sorted(channel_axes, reverse=True): + del reduction_axes[channel_axis] + return tuple(reduction_axes) diff --git a/nncf/common/quantization/initialization/range.py b/nncf/common/quantization/initialization/range.py index 93a248298e4..a015e92b5cd 100644 --- a/nncf/common/quantization/initialization/range.py +++ b/nncf/common/quantization/initialization/range.py @@ -9,12 +9,15 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Dict, List, Optional +from typing import Dict, List, Optional, Tuple, Union +from nncf.common.graph.utils import get_reduction_axes from nncf.common.initialization.dataloader import NNCFDataLoader from nncf.common.quantization.structs import QuantizationScheme from nncf.common.quantization.structs import QuantizerGroup +from nncf.common.tensor_statistics.collectors import ReductionAxes from nncf.config.schemata.defaults import NUM_INIT_SAMPLES +from nncf.experimental.common.tensor_statistics.collectors import AggregationAxes class RangeInitConfig: @@ -204,3 +207,51 @@ def use_means_of_mins(self) -> bool: @property def use_means_of_maxs(self) -> bool: return not self._is_weights and not self._is_per_channel + + def _get_reduction_axes( + self, + shape_to_reduce: Union[Tuple[int, ...], List[int]], + quantization_axes: Union[Tuple[int, ...], List[int]], + aggregation_axes: Union[Tuple[int, ...], List[int]], + ): + """ + Returns axes for a reducer regarding aggregation axes. As aggregator takes axes counting from stacked tensors, + from these axes only tensor related axes should be used for reducer. + + :param shape_to_reduce: Shape of a reduced tensor. 
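# Illustrative sketch (not part of the diff above): the new nncf.common.graph.utils.get_reduction_axes
# keeps every axis except the channel axes, so per-channel statistics are reduced over the rest.
# Re-implemented standalone here so the example runs without NNCF installed.
from typing import List, Tuple


def get_reduction_axes(channel_axes: List[int], shape: List[int]) -> Tuple[int, ...]:
    reduction_axes = list(range(len(shape)))
    for channel_axis in sorted(channel_axes, reverse=True):
        del reduction_axes[channel_axis]    # drop the channel axis, keep batch/spatial axes
    return tuple(reduction_axes)


# For an NCHW activation quantized per-channel along axis 1, min/max are reduced
# over the batch and spatial dimensions only.
assert get_reduction_axes(channel_axes=[1], shape=[8, 3, 224, 224]) == (0, 2, 3)
assert get_reduction_axes(channel_axes=[], shape=[8, 3, 224, 224]) == (0, 1, 2, 3)
# End of sketch.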
+ :param quantization_axes: Axes of quantization. + :param aggregation_axes: Axes of aggregator which is applied onto reduced tensor. + :return: Axes for reducer. + """ + axes_to_keep = set(el - 1 for el in aggregation_axes if el != 0) + axes_to_keep.update(quantization_axes) + return get_reduction_axes(axes_to_keep, shape_to_reduce) + + def _get_aggregation_axes(self, batchwise_statistics: bool) -> Tuple[int, ...]: + """ + Returns axes for aggregator. + + :param batchwise_statistics: Determines whether quantizer statistics should be calculated + for each item of the batch or for the entire batch. + :return Tuple[int]: Aggregation axes. + """ + return (0, 1) if batchwise_statistics else (0,) + + def get_reduction_aggregation_axes( + self, + shape_to_reduce: Union[Tuple[int, ...], List[int]], + quantization_axes: Union[Tuple[int, ...], List[int]], + batchwise_statistics: bool, + ) -> Tuple[ReductionAxes, AggregationAxes]: + """ + Calculates the reduction axes, aggregation axes for the tensor. + + :param shape_to_reduce: Shape of the tensor. + :param quantization_axes: Quantization axes if per-channel quantization. + :param batchwise_statistics: Determines whether quantizer statistics should be calculated + for each item of the batch or for the entire batch. + :return: Reduction axes and aggregation axes. + """ + aggregation_axes = self._get_aggregation_axes(batchwise_statistics) + reduction_axes = self._get_reduction_axes(shape_to_reduce, quantization_axes, aggregation_axes) + return reduction_axes, aggregation_axes diff --git a/nncf/common/quantization/structs.py b/nncf/common/quantization/structs.py index aa898b76f23..b7f198496c1 100644 --- a/nncf/common/quantization/structs.py +++ b/nncf/common/quantization/structs.py @@ -19,6 +19,7 @@ from nncf.common.utils.api_marker import api from nncf.config.schemata.defaults import QUANTIZATION_BITS from nncf.config.schemata.defaults import QUANTIZATION_PER_CHANNEL +from nncf.parameters import StrEnum from nncf.parameters import TargetDevice @@ -327,7 +328,7 @@ class UnifiedScaleType(Enum): @api(canonical_alias="nncf.QuantizationPreset") -class QuantizationPreset(Enum): +class QuantizationPreset(StrEnum): """ An enum with values corresponding to the available quantization presets. """ diff --git a/nncf/common/sparsity/schedulers.py b/nncf/common/sparsity/schedulers.py index b8acc2bc809..c23131f7c0a 100644 --- a/nncf/common/sparsity/schedulers.py +++ b/nncf/common/sparsity/schedulers.py @@ -133,7 +133,7 @@ def __init__(self, controller: SparsityController, params: Dict[str, Any]): self._update_per_optimizer_step = params.get( "update_per_optimizer_step", SPARSITY_SCHEDULER_UPDATE_PER_OPTIMIZER_STEP ) - self._steps_per_epoch = params.get("steps_per_epoch", None) + self._steps_per_epoch = params.get("steps_per_epoch") self._should_skip = False def step(self, next_step: Optional[int] = None) -> None: diff --git a/nncf/common/tensor_statistics/aggregator.py b/nncf/common/tensor_statistics/aggregator.py index cc896b0bee5..62b0b371cb6 100644 --- a/nncf/common/tensor_statistics/aggregator.py +++ b/nncf/common/tensor_statistics/aggregator.py @@ -11,7 +11,7 @@ from abc import ABC from abc import abstractmethod from itertools import islice -from typing import Any, Dict, TypeVar +from typing import Any, Dict, Optional, TypeVar import nncf from nncf.common import factory @@ -25,6 +25,10 @@ TensorType = TypeVar("TensorType") TModel = TypeVar("TModel") +EMPTY_DATASET_ERROR = ( + "Calibration dataset must not be empty. 
Please provide calibration dataset with at least one sample." +) + class StatisticsAggregator(ABC): """ @@ -36,6 +40,17 @@ def __init__(self, dataset: Dataset): self.stat_subset_size = None self.statistic_points = StatisticPointsContainer() + def _get_iterations_number(self) -> Optional[int]: + """ + Returns number of iterations. + + :return: Number of iterations for statistics collection. + """ + dataset_length = self.dataset.get_length() + if dataset_length and self.stat_subset_size: + return min(dataset_length, self.stat_subset_size) + return dataset_length or self.stat_subset_size + def collect_statistics(self, model: TModel, graph: NNCFGraph) -> None: """ Collects statistics for registered StatisticPoints. @@ -46,24 +61,17 @@ def collect_statistics(self, model: TModel, graph: NNCFGraph) -> None: """ if not self.statistic_points: return - model_transformer = factory.ModelTransformerFactory.create(model) - merged_statistics = self._get_merged_statistic_points(self.statistic_points, model, graph) transformation_layout = self._get_transformation_layout_extra_outputs(merged_statistics) model_with_outputs = model_transformer.transform(transformation_layout) engine = factory.EngineFactory.create(model_with_outputs) - dataset_length = self.dataset.get_length() - total = ( - min(dataset_length or self.stat_subset_size, self.stat_subset_size) - if self.stat_subset_size is not None - else None - ) + iterations_number = self._get_iterations_number() empty_statistics = True for input_data in track( - islice(self.dataset.get_inference_data(), self.stat_subset_size), - total=total, + islice(self.dataset.get_inference_data(), iterations_number), + total=iterations_number, description="Statistics collection", ): outputs = engine.infer(input_data) @@ -71,9 +79,7 @@ def collect_statistics(self, model: TModel, graph: NNCFGraph) -> None: self._register_statistics(processed_outputs, merged_statistics) empty_statistics = False if empty_statistics: - raise nncf.ValidationError( - "Calibration dataset must not be empty. Please provide calibration dataset with at least one sample." 
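# Illustrative sketch (not part of the diff above): the iteration-count logic that the new
# StatisticsAggregator._get_iterations_number() applies before the islice-driven calibration loop.
# `fake_dataset` and the helper below are invented stand-ins for the real Dataset/engine objects.
from itertools import islice
from typing import Optional


def get_iterations_number(dataset_length: Optional[int], stat_subset_size: Optional[int]) -> Optional[int]:
    if dataset_length and stat_subset_size:
        return min(dataset_length, stat_subset_size)    # cap by whichever limit is smaller
    return dataset_length or stat_subset_size           # otherwise fall back to the one that is known


fake_dataset = list(range(10))
iterations_number = get_iterations_number(dataset_length=len(fake_dataset), stat_subset_size=3)
collected = [sample for sample in islice(fake_dataset, iterations_number)]
assert iterations_number == 3 and len(collected) == 3
assert get_iterations_number(dataset_length=None, stat_subset_size=300) == 300
# End of sketch.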
- ) + raise nncf.ValidationError(EMPTY_DATASET_ERROR) def register_statistic_points(self, statistic_points: StatisticPointsContainer) -> None: """ diff --git a/nncf/common/utils/helpers.py b/nncf/common/utils/helpers.py index a4e5655b079..136b49f5fd1 100644 --- a/nncf/common/utils/helpers.py +++ b/nncf/common/utils/helpers.py @@ -16,6 +16,8 @@ from tabulate import tabulate +from nncf.common.utils.os import is_windows + def create_table( header: List[str], @@ -35,6 +37,9 @@ def create_table( if not rows: # For empty rows max_col_widths raises IndexError max_col_widths = None + if is_windows(): + # Not all terminals on Windows supports any format of table + table_fmt = "grid" return tabulate(tabular_data=rows, headers=header, tablefmt=table_fmt, maxcolwidths=max_col_widths, floatfmt=".3f") diff --git a/nncf/common/utils/registry.py b/nncf/common/utils/registry.py index b5633bd9ffa..e165a3b60f6 100644 --- a/nncf/common/utils/registry.py +++ b/nncf/common/utils/registry.py @@ -25,7 +25,7 @@ def registry_dict(self): def values(self): return self._registry_dict.values() - def _register(self, obj, name): + def _register(self, obj, name: str): if name in self._registry_dict: raise KeyError("{} is already registered in {}".format(name, self._name)) self._registry_dict[name] = obj diff --git a/nncf/config/schemata/algo/filter_pruning.py b/nncf/config/schemata/algo/filter_pruning.py index 31c184346ac..25efa70c930 100644 --- a/nncf/config/schemata/algo/filter_pruning.py +++ b/nncf/config/schemata/algo/filter_pruning.py @@ -27,7 +27,11 @@ from nncf.config.schemata.defaults import PRUNING_INTERLAYER_RANKING_TYPE from nncf.config.schemata.defaults import PRUNING_LEGR_GENERATIONS from nncf.config.schemata.defaults import PRUNING_LEGR_MAX_PRUNING +from nncf.config.schemata.defaults import PRUNING_LEGR_MUTATE_PERCENT +from nncf.config.schemata.defaults import PRUNING_LEGR_NUM_SAMPLES +from nncf.config.schemata.defaults import PRUNING_LEGR_POPULATION_SIZE from nncf.config.schemata.defaults import PRUNING_LEGR_RANDOM_SEED +from nncf.config.schemata.defaults import PRUNING_LEGR_SIGMA_SCALE from nncf.config.schemata.defaults import PRUNING_LEGR_TRAIN_STEPS from nncf.config.schemata.defaults import PRUNING_NUM_INIT_STEPS from nncf.config.schemata.defaults import PRUNING_SCHEDULE @@ -162,6 +166,26 @@ description="Random seed for LeGR coefficients generation.", default=PRUNING_LEGR_RANDOM_SEED, ), + "population_size": with_attributes( + NUMBER, + description="Size of population for the evolution algorithm.", + default=PRUNING_LEGR_POPULATION_SIZE, + ), + "num_samples": with_attributes( + NUMBER, + description="Number of samples for the evolution algorithm.", + default=PRUNING_LEGR_NUM_SAMPLES, + ), + "mutate_percent": with_attributes( + NUMBER, + description="Percent of mutate for the evolution algorithm.", + default=PRUNING_LEGR_MUTATE_PERCENT, + ), + "scale_sigma": with_attributes( + NUMBER, + description="Scale sigma for the evolution algorithm.", + default=PRUNING_LEGR_SIGMA_SCALE, + ), }, "additionalProperties": False, }, diff --git a/nncf/config/schemata/defaults.py b/nncf/config/schemata/defaults.py index d1a6471a768..3bf599fce95 100644 --- a/nncf/config/schemata/defaults.py +++ b/nncf/config/schemata/defaults.py @@ -59,6 +59,10 @@ PRUNING_LEGR_TRAIN_STEPS = 200 PRUNING_LEGR_MAX_PRUNING = 0.8 PRUNING_LEGR_RANDOM_SEED = 42 +PRUNING_LEGR_POPULATION_SIZE = 64 +PRUNING_LEGR_NUM_SAMPLES = 16 +PRUNING_LEGR_MUTATE_PERCENT = 0.1 +PRUNING_LEGR_SIGMA_SCALE = 1 SPARSITY_INIT = 0.0 MAGNITUDE_SPARSITY_WEIGHT_IMPORTANCE 
= "normed_abs" diff --git a/nncf/data/dataset.py b/nncf/data/dataset.py index a6b7f626855..775dba50a87 100644 --- a/nncf/data/dataset.py +++ b/nncf/data/dataset.py @@ -81,6 +81,17 @@ def get_length(self) -> Optional[int]: return self._data_source.__len__() return None + def get_batch_size(self) -> Optional[int]: + """ + Tries to fetch batch size of the underlying dataset. + :return: The value of batch_size or _batch_size attributes of the data_source if exist, and None otherwise. + """ + if hasattr(self._data_source, "batch_size"): # Torch dataloader + return self._data_source.batch_size + if hasattr(self._data_source, "_batch_size"): # TF dataloader + return self._data_source._batch_size + return None + class DataProvider(Generic[DataItem, ModelInput]): def __init__( diff --git a/nncf/experimental/common/tensor_statistics/collectors.py b/nncf/experimental/common/tensor_statistics/collectors.py index 1017877bf24..700cabb63a3 100644 --- a/nncf/experimental/common/tensor_statistics/collectors.py +++ b/nncf/experimental/common/tensor_statistics/collectors.py @@ -461,7 +461,9 @@ def __init__(self, tensor_collectors: List[TensorCollector]) -> None: self._aggregators[key] = unique_aggregator -##################################################Reducers################################################## +################################################## +# Reducers +################################################## class NoopReducer(TensorReducerBase): @@ -578,7 +580,9 @@ def __hash__(self) -> int: return hash((self.__class__.__name__, self.inplace, self._reduction_axes, self._channel_axis)) -##################################################Aggregators################################################## +################################################## +# Aggregators +################################################## class NoopAggregator(AggregatorBase): diff --git a/nncf/experimental/common/tensor_statistics/statistical_functions.py b/nncf/experimental/common/tensor_statistics/statistical_functions.py index ea49c58ecd1..1253dfb8279 100644 --- a/nncf/experimental/common/tensor_statistics/statistical_functions.py +++ b/nncf/experimental/common/tensor_statistics/statistical_functions.py @@ -9,22 +9,27 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import Optional + from nncf.experimental.tensor import Tensor +from nncf.experimental.tensor import TensorDataType from nncf.experimental.tensor.functions import numeric as fns -def mean_per_channel(x: Tensor, axis: int) -> Tensor: +def mean_per_channel(x: Tensor, axis: int, dtype: Optional[TensorDataType] = None) -> Tensor: """ Computes the mean of elements across given channel dimension of Tensor. :param x: Tensor to reduce. :param axis: The channel dimensions to reduce. + :param dtype: Type to use in computing the mean. :return: Reduced Tensor. 
""" if len(x.shape) < 3: - return fns.mean(x, axis=0) + return fns.mean(x, axis=0, dtype=dtype) + pos_axis = axis + x.ndim if axis < 0 else axis if pos_axis < 0 or pos_axis >= x.ndim: raise ValueError(f"axis {axis} is out of bounds for array of dimension {x.ndim}") axis = tuple(i for i in range(x.ndim) if i != pos_axis) - return fns.mean(x, axis=axis) + return fns.mean(x, axis=axis, dtype=dtype) diff --git a/nncf/experimental/tensor/functions/numeric.py b/nncf/experimental/tensor/functions/numeric.py index 3dd2c0d8815..95547fa6bea 100644 --- a/nncf/experimental/tensor/functions/numeric.py +++ b/nncf/experimental/tensor/functions/numeric.py @@ -355,16 +355,19 @@ def moveaxis(a: Tensor, source: Union[int, Tuple[int, ...]], destination: Union[ @functools.singledispatch @tensor_guard -def mean(a: Tensor, axis: Optional[Union[int, Tuple[int, ...]]] = None, keepdims: bool = False) -> Tensor: +def mean( + a: Tensor, axis: Optional[Union[int, Tuple[int, ...]]] = None, keepdims: bool = False, dtype: TensorDataType = None +) -> Tensor: """ Compute the arithmetic mean along the specified axis. :param a: Array containing numbers whose mean is desired. :param axis: Axis or axes along which the means are computed. :param keepdims: Destination positions for each of the original axes. These must also be unique. + :param dtype: Type to use in computing the mean. :return: Array with moved axes. """ - return Tensor(mean(a.data, axis, keepdims)) + return Tensor(mean(a.data, axis, keepdims, dtype)) @functools.singledispatch diff --git a/nncf/experimental/tensor/functions/numpy_numeric.py b/nncf/experimental/tensor/functions/numpy_numeric.py index 3aef3df1e5f..a975c58072f 100644 --- a/nncf/experimental/tensor/functions/numpy_numeric.py +++ b/nncf/experimental/tensor/functions/numpy_numeric.py @@ -170,8 +170,14 @@ def _(a: np.ndarray, source: Union[int, Tuple[int, ...]], destination: Union[int @register_numpy_types(numeric.mean) -def _(a: Union[np.ndarray, np.generic], axis: Union[int, Tuple[int, ...]] = None, keepdims: bool = False) -> np.ndarray: - return np.array(np.mean(a, axis=axis, keepdims=keepdims)) +def _( + a: Union[np.ndarray, np.generic], + axis: Union[int, Tuple[int, ...]] = None, + keepdims: bool = False, + dtype: Optional[TensorDataType] = None, +) -> np.ndarray: + dtype = DTYPE_MAP[dtype] if dtype else None + return np.array(np.mean(a, axis=axis, keepdims=keepdims, dtype=dtype)) @register_numpy_types(numeric.round) diff --git a/nncf/experimental/tensor/functions/torch_numeric.py b/nncf/experimental/tensor/functions/torch_numeric.py index 781e1ce49e8..5b04a5a5aa2 100644 --- a/nncf/experimental/tensor/functions/torch_numeric.py +++ b/nncf/experimental/tensor/functions/torch_numeric.py @@ -183,8 +183,14 @@ def _(a: torch.Tensor, source: Union[int, Tuple[int, ...]], destination: Union[i @numeric.mean.register(torch.Tensor) -def _(a: torch.Tensor, axis: Union[int, Tuple[int, ...]] = None, keepdims: bool = False) -> torch.Tensor: - return torch.mean(a, dim=axis, keepdim=keepdims) +def _( + a: torch.Tensor, + axis: Union[int, Tuple[int, ...]] = None, + keepdims: bool = False, + dtype: Optional[TensorDataType] = None, +) -> torch.Tensor: + dtype = DTYPE_MAP[dtype] if dtype else None + return torch.mean(a, dim=axis, keepdim=keepdims, dtype=dtype) @numeric.round.register(torch.Tensor) diff --git a/nncf/experimental/torch/nas/bootstrapNAS/search/search.py b/nncf/experimental/torch/nas/bootstrapNAS/search/search.py index d5ca9cd55e1..23800b019b5 100644 --- 
a/nncf/experimental/torch/nas/bootstrapNAS/search/search.py +++ b/nncf/experimental/torch/nas/bootstrapNAS/search/search.py @@ -710,9 +710,8 @@ def _evaluate(self, x: List[float], out: Dict[str, Any], *args, **kargs) -> NoRe result = [sample] - eval_idx = 0 bn_adaption_executed = False - for evaluator_handler in self._evaluator_handlers: + for eval_idx, evaluator_handler in enumerate(self._evaluator_handlers): in_cache, value = evaluator_handler.retrieve_from_cache(tuple(x_i)) if not in_cache: if not bn_adaption_executed and self._search.bn_adaptation is not None: @@ -720,7 +719,6 @@ def _evaluate(self, x: List[float], out: Dict[str, Any], *args, **kargs) -> NoRe bn_adaption_executed = True value = evaluator_handler.evaluate_and_add_to_cache_from_pymoo(tuple(x_i)) evaluators_arr[eval_idx].append(value) - eval_idx += 1 result.append(evaluator_handler.name) result.append(value) diff --git a/nncf/experimental/torch/nas/bootstrapNAS/training/scheduler.py b/nncf/experimental/torch/nas/bootstrapNAS/training/scheduler.py index 3579dbb6dff..9dafe0d0ee9 100644 --- a/nncf/experimental/torch/nas/bootstrapNAS/training/scheduler.py +++ b/nncf/experimental/torch/nas/bootstrapNAS/training/scheduler.py @@ -191,12 +191,10 @@ def get_current_stage_desc(self) -> Tuple[Optional[StageDescriptor], int]: :return: current stage descriptor and its index in the list of all descriptors """ partial_epochs = 0 - stage_desc_idx = 0 - for stage_desc in self.list_stage_descriptors: + for stage_desc_idx, stage_desc in enumerate(self.list_stage_descriptors): partial_epochs += stage_desc.epochs if self.current_epoch < partial_epochs: return stage_desc, stage_desc_idx - stage_desc_idx += 1 return None, -1 def get_total_training_epochs(self) -> int: diff --git a/nncf/experimental/torch/sparsity/movement/scheduler.py b/nncf/experimental/torch/sparsity/movement/scheduler.py index 4274e4206e5..33c8d9d51da 100644 --- a/nncf/experimental/torch/sparsity/movement/scheduler.py +++ b/nncf/experimental/torch/sparsity/movement/scheduler.py @@ -97,16 +97,16 @@ def from_dict(cls, params: Dict[str, Any]) -> "MovementSchedulerParams": :param params: A dict that specifies the parameters of movement sparsity scheduler. :return: A `MovementSchedulerParams` object that stores the parameters from `params`. 
""" - warmup_start_epoch: int = params.get("warmup_start_epoch", None) - warmup_end_epoch: int = params.get("warmup_end_epoch", None) - importance_regularization_factor: float = params.get("importance_regularization_factor", None) + warmup_start_epoch: int = params.get("warmup_start_epoch") + warmup_end_epoch: int = params.get("warmup_end_epoch") + importance_regularization_factor: float = params.get("importance_regularization_factor") enable_structured_masking: bool = params.get("enable_structured_masking", MOVEMENT_ENABLE_STRUCTURED_MASKING) - init_importance_threshold: Optional[float] = params.get("init_importance_threshold", None) + init_importance_threshold: Optional[float] = params.get("init_importance_threshold") final_importance_threshold: float = params.get( "final_importance_threshold", MOVEMENT_FINAL_IMPORTANCE_THRESHOLD ) power: float = params.get("power", MOVEMENT_POWER) - steps_per_epoch: Optional[int] = params.get("steps_per_epoch", None) + steps_per_epoch: Optional[int] = params.get("steps_per_epoch") if None in [warmup_start_epoch, warmup_end_epoch, importance_regularization_factor]: raise ValueError( diff --git a/nncf/onnx/graph/metatypes/groups.py b/nncf/onnx/graph/metatypes/groups.py index d24c921364f..561e1340595 100644 --- a/nncf/onnx/graph/metatypes/groups.py +++ b/nncf/onnx/graph/metatypes/groups.py @@ -117,14 +117,25 @@ # Contains the operation metatypes for which bias can be applied. -OPERATIONS_WITH_BIAS = [ +OPERATIONS_WITH_BIAS_REDUCED = [ onnx_metatypes.ONNXConvolutionMetatype, onnx_metatypes.ONNXGemmMetatype, # TODO: Need to add MatMul with the separate bias support (CVS-135433) ] +OPERATIONS_WITH_BIAS = [*OPERATIONS_WITH_BIAS_REDUCED, onnx_metatypes.ONNXDepthwiseConvolutionMetatype] + QUANTIZE_DEQUANTIZE_OPERATIONS = [ onnx_metatypes.ONNXQuantizeLinearMetatype, onnx_metatypes.ONNXDequantizeLinearMetatype, ] + +# These metatypes mix outputs for different samples into one axis. +# If reducers and aggregators collect statistics at the output of the following operations, +# assuming that 0-axis is batch axis, they get only 1 value instead of batch_size values. +# It could lead to inaccurate/incorrect statistics result. 
+OPERATIONS_OUTPUT_HAS_NO_BATCH_AXIS = [ + onnx_metatypes.ONNXROIAlignMetatype, + onnx_metatypes.ONNXEmbeddingMetatype, +] diff --git a/nncf/onnx/graph/metatypes/onnx_metatypes.py b/nncf/onnx/graph/metatypes/onnx_metatypes.py index 65c162e192e..2105f31a216 100644 --- a/nncf/onnx/graph/metatypes/onnx_metatypes.py +++ b/nncf/onnx/graph/metatypes/onnx_metatypes.py @@ -71,7 +71,7 @@ class ONNXOpWithWeightsMetatype(ONNXOpMetatype): bias_port_id: Optional[int] = None -@ONNX_OPERATION_METATYPES.register() +@ONNX_OPERATION_METATYPES.register(is_subtype=True) class ONNXDepthwiseConvolutionMetatype(ONNXOpWithWeightsMetatype): name = "DepthwiseConvOp" op_names = ["Conv"] @@ -86,7 +86,7 @@ def matches(cls, model: onnx.ModelProto, node: onnx.NodeProto) -> bool: return _is_depthwise_conv(model, node) -@ONNX_OPERATION_METATYPES.register() +@ONNX_OPERATION_METATYPES.register(is_subtype=True) class ONNXGroupConvolutionMetatype(ONNXOpWithWeightsMetatype): name = "GroupConvOp" op_names = ["Conv"] @@ -130,7 +130,7 @@ class ONNXGemmMetatype(ONNXOpWithWeightsMetatype): name = "GemmOp" op_names = ["Gemm"] hw_config_names = [HWConfigOpName.MATMUL] - weight_channel_axis = -1 + weight_channel_axis = -1 # For port_id=1 weight_port_ids = None bias_port_id = 2 possible_weight_ports = [0, 1] @@ -142,7 +142,7 @@ class ONNXMatMulMetatype(ONNXOpMetatype): name = "MatMulOp" op_names = ["MatMul"] hw_config_names = [HWConfigOpName.MATMUL] - weight_channel_axis = -1 + weight_channel_axis = -1 # For port_id=1 weight_port_ids = None bias_port_id = 2 possible_weight_ports = [0, 1] @@ -420,7 +420,7 @@ class ONNXReciprocalMetatype(ONNXOpMetatype): hw_config_names = [HWConfigOpName.POWER] -@ONNX_OPERATION_METATYPES.register() +@ONNX_OPERATION_METATYPES.register(is_subtype=True) class ONNXEmbeddingMetatype(ONNXOpMetatype): name = "EmbeddingOp" hw_config_names = [HWConfigOpName.EMBEDDING] @@ -463,8 +463,8 @@ class ONNXScatterNDMetatype(ONNXOpMetatype): @ONNX_OPERATION_METATYPES.register() -class ONNXRoiAlignMetatype(ONNXOpMetatype): - name = "RoiAlignOp" +class ONNXROIAlignMetatype(ONNXOpMetatype): + name = "ROIAlignOp" op_names = ["RoiAlign"] @@ -749,7 +749,7 @@ def _is_depthwise_conv(model: onnx.ModelProto, node: onnx.NodeProto) -> bool: if attribute.name == "group": conv_group = onnx.helper.get_attribute_value(attribute) weight_tensor_value = None - initializer_name = node.input[1] + initializer_name = get_tensor_edge_name(model, node, 1, get_parents_node_mapping(model)) for init in model.graph.initializer: if init.name == initializer_name: weight_tensor_value = onnx.numpy_helper.to_array(init) diff --git a/nncf/onnx/graph/nncf_graph_builder.py b/nncf/onnx/graph/nncf_graph_builder.py index 79c29928f0c..c5c6a3a3aa3 100644 --- a/nncf/onnx/graph/nncf_graph_builder.py +++ b/nncf/onnx/graph/nncf_graph_builder.py @@ -38,7 +38,6 @@ from nncf.onnx.graph.onnx_helper import get_model_inputs from nncf.onnx.graph.onnx_helper import get_output_port_id_for_node_before_output from nncf.onnx.graph.onnx_helper import get_parents_node_mapping -from nncf.onnx.graph.onnx_helper import get_port_ids_between_nodes from nncf.onnx.graph.onnx_helper import is_node_has_shared_weight @@ -279,7 +278,6 @@ def _add_nncf_input_nodes( output_port_id=output_port_id, dtype=nncf_dtype, ) - output_port_id += 1 @staticmethod def _add_nncf_output_nodes( @@ -324,7 +322,6 @@ def _add_nncf_output_nodes( output_port_id=output_port_id, dtype=nncf_dtype, ) - input_port_id += 1 @staticmethod def convert_onnx_dtype_to_nncf_dtype(onnx_dtype: int) -> Dtype: @@ -381,32 +378,30 @@ 
def create_nncf_graph(onnx_model: onnx.ModelProto) -> NNCFGraph: is_shared=is_shared, ) - for output_node in onnx_model.graph.node: - output_edges = output_node.output - for output_edge in output_edges: - edge = edge_info_mapping.get(output_edge) - if edge is None: - # If the edge is None it means that the edge was not added during shape inference of ONNX model. - # BatchNorm exported in Training mode has unused outputs edges: mean, var, saved_mean, saved_var. - # NNCFGraph should not contain such edges. - continue - tensor_shape = get_edge_shape(edge) - onnx_dtype = get_edge_dtype(edge) - nncf_dtype = GraphConverter.convert_onnx_dtype_to_nncf_dtype(onnx_dtype) - output_node_id = nncf_graph.get_node_by_name(output_node.name).node_id - input_nodes = children_node_mapping[output_edge] - for input_node in input_nodes: - port_ids = get_port_ids_between_nodes(output_node, input_node) - input_port_id = port_ids["input_port_id"] - output_port_id = port_ids["output_port_id"] - in_node_id = nncf_graph.get_node_by_name(input_node.name).node_id + for node in onnx_model.graph.node: + for output_port_id, output_edge_name in enumerate(node.output): + for consumed_node in children_node_mapping[output_edge_name]: + edge = edge_info_mapping.get(output_edge_name) + if edge is None: + # If the edge is None it means that the edge was not added during shape inference of ONNX model. + # BatchNorm exported in Training mode has unused outputs edges: + # mean, var, saved_mean, saved_var. NNCFGraph should not contain such edges. + continue + input_port_id = get_input_port_id_for_node_after_input(output_edge_name, consumed_node) + + in_node_id = nncf_graph.get_node_by_name(node.name).node_id + output_node_id = nncf_graph.get_node_by_name(consumed_node.name).node_id + tensor_shape = get_edge_shape(edge) + onnx_dtype = get_edge_dtype(edge) + nncf_dtype = GraphConverter.convert_onnx_dtype_to_nncf_dtype(onnx_dtype) + nncf_graph.add_edge_between_nncf_nodes( - from_node_id=output_node_id, - to_node_id=in_node_id, + from_node_id=in_node_id, + to_node_id=output_node_id, tensor_shape=tensor_shape, input_port_id=input_port_id, output_port_id=output_port_id, - dtype=Dtype(nncf_dtype), + dtype=nncf_dtype, ) GraphConverter._add_nncf_input_nodes(onnx_model, nncf_graph, edge_info_mapping, children_node_mapping) diff --git a/nncf/onnx/graph/node_utils.py b/nncf/onnx/graph/node_utils.py index 29980abc6d9..be9909cb86a 100644 --- a/nncf/onnx/graph/node_utils.py +++ b/nncf/onnx/graph/node_utils.py @@ -18,7 +18,6 @@ from nncf.common.graph.graph import NNCFNode from nncf.common.graph.transformations.commands import TargetType from nncf.common.logging.logger import nncf_logger -from nncf.common.tensor_statistics.collectors import ReductionAxes from nncf.onnx.graph.metatypes import onnx_metatypes as om from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXDequantizeLinearMetatype from nncf.onnx.graph.onnx_helper import get_tensor_value @@ -128,29 +127,14 @@ def transpose_axis(shape: List[int], axis: int) -> int: Returns transpose axis. :param shape: Tensor shape. - :param axis: Axis before transpose. + :param axis: Axis before transpose (only positive). :return: Axis after transpose. """ - axis %= len(shape) # Make axis positive + assert axis >= 0 return range(len(shape) - 1, -1, -1)[axis] # Iterate backward throug axis -def get_reduction_shape(shape: List[int], axis: int) -> ReductionAxes: - """ - Returns reduction shape for shape and axis. - - :param shape: Shape. - :param axis: Axis. - :return: Reduction shape. 
- """ - reduction_shape = list(range(len(shape))) - if len(reduction_shape) == 1: # If only one channel - return tuple(reduction_shape) - reduction_shape.pop(axis) - return tuple(reduction_shape) - - -def _get_weight_quantization_axis(node: NNCFNode, port_id: int) -> int: +def get_weight_quantization_axis(node: NNCFNode, port_id: int) -> int: """ Returns weight tensor axis, along which quantizer parameters are calculated. @@ -161,6 +145,9 @@ def _get_weight_quantization_axis(node: NNCFNode, port_id: int) -> int: weight_channel_axis = node.metatype.weight_channel_axis if node.layer_attributes.has_node_attrs() and node.metatype == om.ONNXGemmMetatype: weight_shape = node.layer_attributes.weight_attrs[port_id]["shape"] + weight_channel_axis %= len(weight_shape) # Make axis positive + if port_id == 0: + weight_channel_axis -= 1 if ( port_id == 0 and node.layer_attributes.node_attrs["transA"] == 1 @@ -171,18 +158,9 @@ def _get_weight_quantization_axis(node: NNCFNode, port_id: int) -> int: return weight_channel_axis -def _get_activation_quantization_axis() -> int: - """ - Returns activation tensor axis, along which quantizer parameters are calculated. - - :return: Axis, along which quantizer parameters are calculated. - """ - return 1 # Activations have channel first layout: [N, C, Z, Y, X] - - def _get_activation_tensor_shape( nncf_graph: NNCFGraph, node: NNCFNode, target_point: ONNXTargetPoint -) -> Optional[List[int]]: +) -> Optional[Tuple[int, ...]]: """ Returns shape of an activation tensor which is correspond to the target point and node. ONNX model can not have a shape of a edge, even after shape inference. @@ -203,12 +181,12 @@ def _get_activation_tensor_shape( if target_point.type == TargetType.PRE_LAYER_OPERATION: nncf_logger.info( f"The shape of input edge of a node {node.node_name} is unkown. \ - Therefore per-tensor quantizaiton is applied." + It could lead to inaccurate statistics collection." ) elif target_point.type == TargetType.POST_LAYER_OPERATION: nncf_logger.info( f"The shape of output edge of a node {node.node_name} is unkown. \ - Therefore per-tensor quantizaiton is applied." + It could lead to inaccurate statistics collection." ) nncf_logger.info("Please consider to run pre-processing before quantization.") # TODO: add preprocessing tool for ONNX model. @@ -218,7 +196,7 @@ def _get_activation_tensor_shape( def get_quantized_tensor_shape( nncf_graph: NNCFGraph, node: NNCFNode, target_point: ONNXTargetPoint -) -> Optional[List[int]]: +) -> Optional[Tuple[int, ...]]: """ Returns quantized tensor shape corresponding to a target point with a node if shape - info is existed. If there is no shape info - returns None. @@ -231,20 +209,3 @@ def get_quantized_tensor_shape( if target_point.is_weight_target_point(): return node.layer_attributes.weight_attrs[target_point.port_id]["shape"] return _get_activation_tensor_shape(nncf_graph, node, target_point) - - -def get_quantization_axis(is_per_channel: bool, node: NNCFNode, target_point: ONNXTargetPoint) -> Optional[int]: - """ - Returns axis of quantizer parameters are calculated along. - If quantization is per-tensor returns None. - - :param is_per_channel: True if quantizater is per-channel. - :param node: NNCFNode. - :param target_point: Target point indicates the quantizer place in the model graph. - :return: None if per-tensor, otherwise quantizion axis. 
- """ - if not is_per_channel: - return None - if target_point.is_weight_target_point(): - return _get_weight_quantization_axis(node, target_point.port_id) - return _get_activation_quantization_axis() diff --git a/nncf/onnx/graph/onnx_helper.py b/nncf/onnx/graph/onnx_helper.py index 24cfd0c7c56..518f6323321 100644 --- a/nncf/onnx/graph/onnx_helper.py +++ b/nncf/onnx/graph/onnx_helper.py @@ -114,26 +114,6 @@ def get_output_port_id_for_node_before_output(output_name: str, from_node: onnx. raise nncf.ValidationError(f"The node {from_node} does not have output edge with the name {output_name}") -def get_port_ids_between_nodes(from_node: onnx.NodeProto, to_node: onnx.NodeProto) -> Dict[str, int]: - """ - Returns input_port_id and output_port_id between 'from_node' and 'to_node'. - - :param from_node: Node, whose output is connected to 'to_node' node. - :param to_node: Node, whose input is connected to 'from_node' node. - :return: Dict{'input_port_id': input port id, 'output_port_id': output port id} - """ - output = {"input_port_id": None, "output_port_id": None} - for port_id, port in enumerate(to_node.input): - if port in from_node.output: - output["input_port_id"] = port_id - for port_id, port in enumerate(from_node.output): - if port in to_node.input: - output["output_port_id"] = port_id - if output["output_port_id"] is None or output["input_port_id"] is None: - raise nncf.InternalError(f"The nodes {from_node.name} and {to_node.name} do not have edges between.") - return output - - def get_node_index(model: onnx.ModelProto, node_name: str) -> Optional[int]: """ Returns the node index in the model. diff --git a/nncf/onnx/quantization/quantize_model.py b/nncf/onnx/quantization/quantize_model.py index c89b39a14eb..094b98e81af 100644 --- a/nncf/onnx/quantization/quantize_model.py +++ b/nncf/onnx/quantization/quantize_model.py @@ -17,6 +17,7 @@ from nncf.common.logging.logger import nncf_logger from nncf.common.quantization.structs import QuantizationPreset from nncf.data import Dataset +from nncf.onnx.graph.metatypes.groups import OPERATIONS_OUTPUT_HAS_NO_BATCH_AXIS from nncf.onnx.graph.nncf_graph_builder import GraphConverter from nncf.parameters import DropType from nncf.parameters import ModelType @@ -30,6 +31,7 @@ from nncf.quantization.algorithms.accuracy_control.evaluator import Evaluator from nncf.quantization.algorithms.post_training.algorithm import PostTrainingQuantization from nncf.quantization.quantize_model import quantize_with_tune_hyperparams +from nncf.quantization.quantize_model import warning_model_no_batchwise_support from nncf.quantization.telemetry_extractors import CompressionStartedWithQuantizeApi from nncf.scopes import IgnoredScope from nncf.telemetry import tracked_function @@ -81,6 +83,7 @@ def quantize_impl( ) graph = GraphConverter.create_nncf_graph(model) + warning_model_no_batchwise_support(graph, advanced_parameters, model_type, OPERATIONS_OUTPUT_HAS_NO_BATCH_AXIS) quantized_model = quantization_algorithm.apply(model, graph, dataset=calibration_dataset) return quantized_model diff --git a/nncf/onnx/quantization/quantizer_parameters.py b/nncf/onnx/quantization/quantizer_parameters.py index 1a9570d758d..a063e6a7055 100644 --- a/nncf/onnx/quantization/quantizer_parameters.py +++ b/nncf/onnx/quantization/quantizer_parameters.py @@ -37,7 +37,7 @@ class ONNXQuantizerLayerParameters: def convert_fq_params_to_onnx_params( - parameters: FakeQuantizeParameters, num_bits: int, tensor_type: np.dtype, axis: Optional[int] = None + parameters: FakeQuantizeParameters, num_bits: 
int, tensor_type: np.dtype, axis: Tuple[int] ) -> ONNXQuantizerLayerParameters: """ Converts common FakeQuantizeParameters to ONNXQuantizerLayerParameters. @@ -45,7 +45,7 @@ def convert_fq_params_to_onnx_params( :param parameters: FakeQuantizeParameters representation. :param num_bits: Number of quantizer bits. :param tensor_type: Value type of the tensor. Could be INT8 or UINT8. - :param axis: Axis for per-channel quantization. Should be none in case of per-tensor. + :param axis: Axis for per-channel quantization. :return: Quantizer layer attributes. """ if num_bits != 8: @@ -68,6 +68,8 @@ def convert_fq_params_to_onnx_params( # ONNX demands parameters to be a scalar or 1-D Tensor. scale = np.squeeze(scale) zero_point = np.squeeze(zero_point) + # ONNX axis parameter format specification. + axis = None if not axis else axis[0] return ONNXQuantizerLayerParameters(scale.data, zero_point.data, tensor_type, axis) diff --git a/nncf/openvino/engine.py b/nncf/openvino/engine.py index 54efab783ca..d5db5fb6c94 100644 --- a/nncf/openvino/engine.py +++ b/nncf/openvino/engine.py @@ -14,7 +14,6 @@ import numpy as np import openvino.runtime as ov -import nncf from nncf.common.engine import Engine from nncf.openvino.graph.model_utils import model_has_state from nncf.parameters import TargetDevice @@ -32,30 +31,6 @@ class OVCompiledModelEngine(Engine): def __init__(self, compiled_model: ov.CompiledModel, stateful: bool): self.infer_request = compiled_model.create_infer_request() self.reset_state = stateful and hasattr(self.infer_request, "reset_state") - self.input_tensor_names = set() - self.number_of_inputs = len(compiled_model.inputs) - for model_input in compiled_model.inputs: - self.input_tensor_names.update(model_input.get_names()) - - def _check_input_data_format( - self, input_data: Union[np.ndarray, List[np.ndarray], Tuple[np.ndarray], Dict[str, np.ndarray]] - ) -> None: - """ - Checks correspondence of the model input names and the passed data. - If there is a mismatch, the method throws a more specific and readable error than - original error raised by the compiled model. - - :param input_data: Provided inputs to infer the model. - """ - actual_num_inputs = 1 if isinstance(input_data, np.ndarray) else len(input_data) - if actual_num_inputs != self.number_of_inputs: - raise nncf.ValidationError( - f"Model expects {self.number_of_inputs} inputs, but {actual_num_inputs} are provided." - ) - if isinstance(input_data, dict): - for name in input_data: - if isinstance(name, str) and name not in self.input_tensor_names: - raise nncf.ValidationError(f"Missing a required input: {name} to run the model.") def infer( self, input_data: Union[np.ndarray, List[np.ndarray], Tuple[np.ndarray], Dict[str, np.ndarray]] @@ -67,8 +42,6 @@ def infer( :param input_data: Inputs for the model. :return output_data: Model's output. """ - self._check_input_data_format(input_data) - if self.reset_state: self.infer_request.reset_state() diff --git a/nncf/openvino/graph/metatypes/groups.py b/nncf/openvino/graph/metatypes/groups.py index 38ffff12753..c19d2bae83c 100644 --- a/nncf/openvino/graph/metatypes/groups.py +++ b/nncf/openvino/graph/metatypes/groups.py @@ -198,3 +198,16 @@ ov_metatypes.OVConvolutionBackpropDataMetatype, ov_metatypes.OVGroupConvolutionBackpropDataMetatype, ] + +# These metatypes mix outputs for different samples into one axis. 
+# If reducers and aggregators collect statistics at the output of the following operations, +# assuming that 0-axis is batch axis, they get only 1 value instead of batch_size values. +# It could lead to inaccurate/incorrect statistics result. +OPERATIONS_OUTPUT_HAS_NO_BATCH_AXIS = [ + ov_metatypes.OVSpaceToBatchMetatype, + ov_metatypes.OVBatchToSpaceMetatype, + ov_metatypes.OVROIPoolingMetatype, + ov_metatypes.OVROIAlignMetatype, + ov_metatypes.OVEmbeddingMetatype, + ov_metatypes.OVIfMetatype, +] diff --git a/nncf/openvino/graph/metatypes/openvino_metatypes.py b/nncf/openvino/graph/metatypes/openvino_metatypes.py index c884b8621c4..eb806ddfffa 100644 --- a/nncf/openvino/graph/metatypes/openvino_metatypes.py +++ b/nncf/openvino/graph/metatypes/openvino_metatypes.py @@ -70,7 +70,7 @@ class OVConvolutionBackpropDataMetatype(OVOpMetatype): output_channel_axis = 1 -@OV_OPERATOR_METATYPES.register() +@OV_OPERATOR_METATYPES.register(is_subtype=True) class OVDepthwiseConvolutionMetatype(OVOpMetatype): name = "DepthwiseConvolutionOp" op_names = ["GroupConvolution"] @@ -410,7 +410,7 @@ class OVLogicalXorMetatype(OVOpMetatype): hw_config_names = [HWConfigOpName.LOGICALXOR] -@OV_OPERATOR_METATYPES.register() +@OV_OPERATOR_METATYPES.register(is_subtype=True) class OVEmbeddingMetatype(OVOpMetatype): name = "EmbeddingOp" hw_config_names = [HWConfigOpName.EMBEDDING] @@ -469,11 +469,17 @@ class OVLogMetatype(OVOpMetatype): @OV_OPERATOR_METATYPES.register() -class OVRoiAlignMetatype(OVOpMetatype): - name = "RoiAlignOp" +class OVROIAlignMetatype(OVOpMetatype): + name = "ROIAlignOp" op_names = ["ROIAlign"] +@OV_OPERATOR_METATYPES.register() +class OVROIPoolingMetatype(OVOpMetatype): + name = "ROIPoolingOp" + op_names = ["ROIPooling"] + + @OV_OPERATOR_METATYPES.register() class OVGatherMetatype(OVOpMetatype): name = "GatherOp" diff --git a/nncf/openvino/graph/nncf_graph_builder.py b/nncf/openvino/graph/nncf_graph_builder.py index fa9b80b8937..a780bb31832 100644 --- a/nncf/openvino/graph/nncf_graph_builder.py +++ b/nncf/openvino/graph/nncf_graph_builder.py @@ -35,31 +35,35 @@ class GraphConverter: """ @staticmethod - def convert_to_nncf_dtype(ov_dtype: str) -> Dtype: + def convert_to_nncf_dtype(ov_type: ov.Type) -> Dtype: """ Converts the primitive types from the OpenVINO domain to the NNCF domain. :param ov_dtype: OpenVINO primitive typename. :return: NNCF primitive type. 
""" + type_name = ov_type.get_type_name() conversion_map = { "f16": "float", "f32": "float", "f64": "float", "i4": "int", "i8": "int", + "i16": "int", "i32": "int", "i64": "int", "u1": "int", "u4": "int", "u8": "int", + "u16": "int", "u32": "int", "u64": "int", "boolean": "int", + "string": "int", } - if ov_dtype not in conversion_map: - raise NotImplementedError(f"NNCF is not yet supported OpenVINO data type: {ov_dtype}.") - return Dtype(conversion_map[ov_dtype]) + if type_name not in conversion_map: + raise NotImplementedError(f"NNCF is not yet supported OpenVINO data type: {type_name}.") + return Dtype(conversion_map[type_name]) @staticmethod def _filter_weight_input_ports(inputs: List[ov.Input], metatype: Type[OperatorMetatype]) -> List[ov.Input]: @@ -96,8 +100,7 @@ def _add_edges_to_nncf_graph(model: ov.Model, graph: NNCFGraph) -> None: for out_node, inputs in node_vs_target_inputs.items(): tensor_shape = list(out.partial_shape.get_max_shape()) output_node_id = graph.get_node_by_name(out_node.get_friendly_name()).node_id - ov_dtype = out.get_element_type().get_type_name() - nncf_dtype = GraphConverter.convert_to_nncf_dtype(ov_dtype) + nncf_dtype = GraphConverter.convert_to_nncf_dtype(out.get_element_type()) parallel_inputs = None if len(inputs) > 1: @@ -109,7 +112,7 @@ def _add_edges_to_nncf_graph(model: ov.Model, graph: NNCFGraph) -> None: tensor_shape=tensor_shape, input_port_id=inputs[0].get_index(), output_port_id=output_port_id, - dtype=Dtype(nncf_dtype), + dtype=nncf_dtype, parallel_input_port_ids=parallel_inputs, ) @@ -189,8 +192,7 @@ def create_nncf_graph(model: ov.Model) -> NNCFGraph: if const_node is None: continue - ov_dtype = const_node.get_element_type().get_type_name() - if GraphConverter.convert_to_nncf_dtype(ov_dtype) == Dtype.INTEGER: + if GraphConverter.convert_to_nncf_dtype(const_node.get_element_type()) == Dtype.INTEGER: continue const_attrs[const_port_id] = { diff --git a/nncf/openvino/graph/node_utils.py b/nncf/openvino/graph/node_utils.py index ae9bf264ea5..533580b92b2 100644 --- a/nncf/openvino/graph/node_utils.py +++ b/nncf/openvino/graph/node_utils.py @@ -379,20 +379,6 @@ def get_matmul_channel_axes(node: ov.Node) -> List[int]: return [idx for idx, elem in enumerate(weights_layout) if elem in [OVLayoutElem.SPATIAL, OVLayoutElem.C_OUT]] -def get_channel_agnostic_reduction_axes(channel_axes: List[int], shape: List[int]) -> Optional[ReductionAxes]: - """ - Returns filtered reduction axes without axes that corresponds channels. - - :param channel_axes: List of the channel axes. - :param shape: Shape that need to be filtered. - :return: Reduction axes in tuple format. - """ - reduction_axes = list(range(len(shape))) - for channel_axis in sorted(channel_axes, reverse=True): - del reduction_axes[channel_axis] - return tuple(reduction_axes) - - def create_bias_tensor(node_without_bias: NNCFNode, graph: NNCFGraph, value: Any) -> np.ndarray: """ Creates bias value constant array filled by given value. 
diff --git a/nncf/openvino/quantization/quantize_model.py b/nncf/openvino/quantization/quantize_model.py index 69b1938329b..d74a656169e 100644 --- a/nncf/openvino/quantization/quantize_model.py +++ b/nncf/openvino/quantization/quantize_model.py @@ -19,6 +19,7 @@ from nncf.common.logging import nncf_logger from nncf.common.quantization.structs import QuantizationPreset from nncf.data import Dataset +from nncf.openvino.graph.metatypes.groups import OPERATIONS_OUTPUT_HAS_NO_BATCH_AXIS from nncf.openvino.graph.model_utils import remove_friendly_name_duplicates from nncf.openvino.graph.nncf_graph_builder import GraphConverter from nncf.openvino.graph.node_utils import get_number_if_op @@ -41,6 +42,7 @@ from nncf.quantization.algorithms.post_training.algorithm import PostTrainingQuantization from nncf.quantization.algorithms.weight_compression.algorithm import WeightCompression from nncf.quantization.quantize_model import quantize_with_tune_hyperparams +from nncf.quantization.quantize_model import warning_model_no_batchwise_support from nncf.quantization.telemetry_extractors import CompressionStartedWithQuantizeApi from nncf.scopes import IgnoredScope from nncf.telemetry.decorator import tracked_function @@ -81,6 +83,7 @@ def native_quantize_if_op_impl( ) graph = GraphConverter.create_nncf_graph(model) + warning_model_no_batchwise_support(graph, advanced_parameters, model_type, OPERATIONS_OUTPUT_HAS_NO_BATCH_AXIS) if_ops_number = get_number_if_op(model) all_models_number = if_ops_number * 2 + 1 nncf_logger.info( @@ -135,8 +138,8 @@ def native_quantize_impl( ignored_scope=ignored_scope, advanced_parameters=advanced_parameters, ) - graph = GraphConverter.create_nncf_graph(model) + warning_model_no_batchwise_support(graph, advanced_parameters, model_type, OPERATIONS_OUTPUT_HAS_NO_BATCH_AXIS) quantized_model = quantization_algorithm.apply(model, graph, dataset=calibration_dataset) if is_weight_compression_needed(advanced_parameters): diff --git a/nncf/quantization/advanced_parameters.py b/nncf/quantization/advanced_parameters.py index dff463260be..dd74741eecc 100644 --- a/nncf/quantization/advanced_parameters.py +++ b/nncf/quantization/advanced_parameters.py @@ -22,13 +22,14 @@ import nncf from nncf.common.quantization.structs import QuantizationScheme as QuantizationMode from nncf.common.utils.api_marker import api +from nncf.parameters import StrEnum from nncf.quantization.range_estimator import AggregatorType from nncf.quantization.range_estimator import RangeEstimatorParameters from nncf.quantization.range_estimator import StatisticsType -@api() -class OverflowFix(Enum): +@api(canonical_alias="nncf.OverflowFix") +class OverflowFix(StrEnum): """ This option controls whether to apply the overflow issue fix for the 8-bit quantization. @@ -58,7 +59,7 @@ class OverflowFix(Enum): @api() -class FP8Type(Enum): +class FP8Type(StrEnum): """ Defines FP8 special types (https://arxiv.org/pdf/2209.05433.pdf). @@ -190,6 +191,12 @@ class AdvancedQuantizationParameters: :type disable_channel_alignment: bool :param disable_bias_correction: Whether to disable the bias correction. :type disable_bias_correction: bool + :param batchwise_statistics: Determines whether quantizer statistics should be calculated + for each item of the batch or for the entire batch, default is None. + "None" means that if torch.DataLoader or tensorflow.Dataset was passed as a data source for + the calibration dataset, then in case batch_size of the data source > 1 batchwise_statistics sets to True, + otherwise sets to False. 
+ :type batchwise_statistics: Optional[bool] :param activations_quantization_params: Quantization parameters for activations. :type activations_quantization_params: nncf.quantization.advanced_parameters.QuantizationParameters :param weights_quantization_params: Quantization parameters for weights. @@ -217,6 +224,7 @@ class AdvancedQuantizationParameters: inplace_statistics: bool = True disable_channel_alignment: bool = True disable_bias_correction: bool = False + batchwise_statistics: Optional[bool] = None # Advanced Quantization parameters activations_quantization_params: Union[QuantizationParameters, FP8QuantizationParameters] = None diff --git a/nncf/quantization/algorithms/channel_alignment/algorithm.py b/nncf/quantization/algorithms/channel_alignment/algorithm.py index c8582cef551..b30749b6d2c 100644 --- a/nncf/quantization/algorithms/channel_alignment/algorithm.py +++ b/nncf/quantization/algorithms/channel_alignment/algorithm.py @@ -22,6 +22,7 @@ from nncf.common.graph.transformations.commands import TargetPoint from nncf.common.graph.transformations.commands import TargetType from nncf.common.graph.transformations.layout import TransformationLayout +from nncf.common.graph.utils import get_reduction_axes from nncf.common.logging import nncf_logger from nncf.common.logging.track_progress import track from nncf.common.tensor_statistics.statistic_point import StatisticPoint @@ -389,7 +390,7 @@ def get_statistic_points(self, model: TModel, graph: NNCFGraph) -> StatisticPoin channel_axis = conv_in.metatype.output_channel_axis activation_shape = list(range(len(graph.get_output_edges(node_in)[0].tensor_shape))) - reduction_axes = self._backend_entity.get_channel_agnostic_reduction_axes([channel_axis], activation_shape) + reduction_axes = get_reduction_axes([0, channel_axis], activation_shape) statistic_collector = self._backend_entity.get_statistic_collector( reduction_axes, self._quantile, self.subset_size, self.inplace_statistics diff --git a/nncf/quantization/algorithms/channel_alignment/backend.py b/nncf/quantization/algorithms/channel_alignment/backend.py index efdfbb7be2f..b781ab923d3 100644 --- a/nncf/quantization/algorithms/channel_alignment/backend.py +++ b/nncf/quantization/algorithms/channel_alignment/backend.py @@ -151,15 +151,3 @@ def create_bias_tensor(node: NNCFNode, nncf_graph: NNCFGraph, value: Any) -> np. :param value: Value to fill bias constant array. :return: Bias value constant array filled by given value. """ - - @staticmethod - @abstractmethod - def get_channel_agnostic_reduction_axes(channel_axis: int, shape: Tuple[int]) -> Tuple[int]: - """ - Returns filtered reduction shape without axes that corresponds channels. - Example: channel_axis=-2, shape=(1, 3, 2, 4), result=(0, 1, 3). - - :param channel_axes: List of the channel axes. - :param shape: Shape that need to be filtered. - :return: Reduction shape in tuple format. 
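# Illustrative sketch (hedged, not part of the diff above): enabling per-sample statistics via
# the new `batchwise_statistics` switch. The nncf.quantize() call is left as a comment because
# it needs a real model and calibration dataset, which are assumed here.
from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters

advanced_parameters = AdvancedQuantizationParameters(batchwise_statistics=True)
assert advanced_parameters.batchwise_statistics is True
# quantized_model = nncf.quantize(model, calibration_dataset, advanced_parameters=advanced_parameters)
# End of sketch.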
- """ diff --git a/nncf/quantization/algorithms/channel_alignment/openvino_backend.py b/nncf/quantization/algorithms/channel_alignment/openvino_backend.py index 83512ccd4a4..f4596b1b36d 100644 --- a/nncf/quantization/algorithms/channel_alignment/openvino_backend.py +++ b/nncf/quantization/algorithms/channel_alignment/openvino_backend.py @@ -19,7 +19,6 @@ from nncf.common.graph import NNCFNode from nncf.common.graph.layer_attributes import ConvolutionLayerAttributes from nncf.common.graph.transformations.commands import TargetType -from nncf.common.tensor_statistics.collectors import ReductionAxes from nncf.common.tensor_statistics.collectors import TensorStatisticCollectorBase from nncf.experimental.common.tensor_statistics.collectors import MedianAggregator from nncf.experimental.common.tensor_statistics.collectors import TensorCollector @@ -35,7 +34,6 @@ from nncf.openvino.graph.metatypes.openvino_metatypes import OVSubtractMetatype from nncf.openvino.graph.node_utils import create_bias_tensor from nncf.openvino.graph.node_utils import get_bias_value -from nncf.openvino.graph.node_utils import get_channel_agnostic_reduction_axes from nncf.openvino.graph.node_utils import get_node_with_bias_value from nncf.openvino.graph.node_utils import get_weight_value from nncf.openvino.graph.transformations.commands import OVTargetPoint @@ -87,7 +85,9 @@ def get_statistic_collector( quantile_reducer = OVQuantileReducer(reduction_axes, (q, 1 - q), inplace) for port_id, container_key in enumerate([OVMinMaxTensorStatistic.MIN_STAT, OVMinMaxTensorStatistic.MAX_STAT]): - aggregator = MedianAggregator(OVNNCFCollectorTensorProcessor, num_samples=num_samples) + aggregator = MedianAggregator( + OVNNCFCollectorTensorProcessor, num_samples=num_samples, aggregation_axes=(0, 1) + ) tensor_collector.register_statistic_branch(container_key, quantile_reducer, aggregator, port_id) return tensor_collector @@ -136,7 +136,3 @@ def get_conv_layer_attributes(node: NNCFNode) -> ConvolutionLayerAttributes: @staticmethod def create_bias_tensor(node: NNCFNode, nncf_graph: NNCFGraph, value: Any) -> np.ndarray: return create_bias_tensor(node, nncf_graph, value) - - @staticmethod - def get_channel_agnostic_reduction_axes(channel_axis: int, shape: Tuple[int]) -> ReductionAxes: - return get_channel_agnostic_reduction_axes(channel_axes=channel_axis, shape=shape) diff --git a/nncf/quantization/algorithms/fast_bias_correction/onnx_backend.py b/nncf/quantization/algorithms/fast_bias_correction/onnx_backend.py index f9f2e98d125..8eb6ba04c4a 100644 --- a/nncf/quantization/algorithms/fast_bias_correction/onnx_backend.py +++ b/nncf/quantization/algorithms/fast_bias_correction/onnx_backend.py @@ -19,6 +19,7 @@ from nncf.common.graph.transformations.commands import TargetType from nncf.experimental.common.tensor_statistics.collectors import TensorCollector from nncf.experimental.tensor import Tensor +from nncf.onnx.graph.metatypes.groups import OPERATIONS_WITH_BIAS_REDUCED from nncf.onnx.graph.node_utils import get_bias_value from nncf.onnx.graph.node_utils import is_any_weight_quantized from nncf.onnx.graph.node_utils import is_node_with_bias @@ -89,7 +90,7 @@ def is_quantized_weights(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: @staticmethod def is_node_with_bias(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: - return is_node_with_bias(node) + return is_node_with_bias(node) and node.metatype in OPERATIONS_WITH_BIAS_REDUCED @staticmethod def get_node_names_for_input_output_statistics(node: NNCFNode, nncf_graph: NNCFGraph) -> Tuple[str, 
str]: diff --git a/nncf/quantization/algorithms/min_max/algorithm.py b/nncf/quantization/algorithms/min_max/algorithm.py index 1d26ced1296..92b2d5067f5 100644 --- a/nncf/quantization/algorithms/min_max/algorithm.py +++ b/nncf/quantization/algorithms/min_max/algorithm.py @@ -141,6 +141,7 @@ def __init__( overflow_fix: Optional[OverflowFix] = None, quantize_outputs: bool = False, inplace_statistics: bool = True, + batchwise_statistics: bool = False, activations_quantization_params: Union[QuantizationParameters, FP8QuantizationParameters] = None, weights_quantization_params: Union[QuantizationParameters, FP8QuantizationParameters] = None, activations_range_estimator_params: Optional[RangeEstimatorParameters] = None, @@ -171,6 +172,8 @@ def __init__( :param inplace_statistics: Defines wheather to calculate quantizers statistics by backend graph operations or by default Python implementation, defaults to True. + :param batchwise_statistics: Determines whether quantizer statistics should be calculated + for each item of the batch or for the entire batch, default is False. :param activations_quantization_params: Quantization parameters for model activations. :param weights_quantization_params: Quantization parameters for model weights. @@ -187,6 +190,7 @@ def __init__( self._overflow_fix = overflow_fix self._quantize_outputs = quantize_outputs self._inplace_statistics = inplace_statistics + self._batchwise_statistics = batchwise_statistics self._backend_params = backend_params self._activations_quantization_params = activations_quantization_params self._weights_quantization_params = weights_quantization_params @@ -395,34 +399,51 @@ def _get_range_estimator_parameters( def _get_stat_collector( self, - nncf_graph: NNCFGraph, + graph: NNCFGraph, target_point: TargetPoint, - quantizer_config: QuantizerConfig, - num_samples: int, + qconfig: QuantizerConfig, + batchwise_statistics: bool, ) -> TensorStatisticCollectorBase: """ Creates and returns a statistic collector based on the quantizer's configuration. - :param nncf_graph: NNCFGraph instance. + :param graph: NNCFGraph instance. :param target_point: Target point indicates where statistics should be collected. - :param quantizer_config: Configuration of a quantizer layer, + :param qconfig: Configuration of a quantizer layer, defining the configuration of created statistic collector. - :param num_samples: Number of samples to collect from the 'target_point'. + :param batchwise_statistics: Determines whether quantizer statistics should be calculated + for each item of the batch or for the entire batch. :return: Statistic Collector. """ - range_estimator_params = self._get_range_estimator_parameters(target_point, quantizer_config) + is_weight = target_point.is_weight_target_point() + node = graph.get_node_by_name(target_point.target_node_name) + shape = self._backend_entity.get_target_point_shape(graph, node, target_point) + range_estimator_params = self._get_range_estimator_parameters(target_point, qconfig) + + channel_axes = () + if qconfig.per_channel: + channel_axes = self._backend_entity.get_weight_quantization_axes(node, target_point) if is_weight else (1,) + + # Weight statistics is constant, so only one collection is enough. 
+ num_samples = self._subset_size if not is_weight else 1 + + batchwise_statistics = batchwise_statistics and not is_weight collector_params = RangeInitCollectorParams( - is_weights=target_point.is_weight_target_point(), - scheme=quantizer_config.mode, - per_channel=quantizer_config.per_channel, + is_weights=is_weight, scheme=qconfig.mode, per_channel=qconfig.per_channel ) + reduction_axes, aggregation_axes = None, None + if shape is not None: + reduction_axes, aggregation_axes = collector_params.get_reduction_aggregation_axes( + shape, channel_axes, batchwise_statistics + ) + return self._backend_entity.get_statistic_collector( range_estimator_params, - nncf_graph, - target_point, - collector_params, - inplace=self._inplace_statistics, + collector_params.use_abs_max, + reduction_axes, + aggregation_axes, + self._inplace_statistics, num_samples=num_samples, ) @@ -842,25 +863,29 @@ def filter_func(point: StatisticPoint) -> bool: group_statistics.append(statistics) unified_values = self._backend_entity.unify_statistics(group_statistics) - for quantization_target_point in unified_scale_group: - qconfig = quantization_target_points[quantization_target_point] - q_group = QuantizerGroup.ACTIVATIONS - narrow_range = get_quantizer_narrow_range(qconfig, q_group) - if self._mode is not None: - destination_type = self._quantization_params[q_group].destination_type - parameters = calculate_convert_parameters( - unified_values, is_per_channel=qconfig.per_channel, destination_type=destination_type - ) - command = self._backend_entity.create_convert_insertion_command( - quantization_target_point, parameters - ) - else: - parameters = calculate_quantizer_parameters(unified_values, qconfig, q_group, narrow_range) - command = self._backend_entity.create_quantizer_insertion_command( - graph, quantization_target_point, qconfig, parameters + qconfigs = [quantization_target_points[qtp] for qtp in unified_scale_group] + if any(qconfigs[0] != qconfig for qconfig in qconfigs[1:]): + raise nncf.InternalError(f"QConfigs for unified scale group {unified_scale_group} are not equal") + qconfig = qconfigs[0] + q_group = QuantizerGroup.ACTIVATIONS + narrow_range = get_quantizer_narrow_range(qconfig, q_group) + if self._mode is not None: + destination_type = self._quantization_params[q_group].destination_type + parameters = calculate_convert_parameters( + unified_values, is_per_channel=qconfig.per_channel, destination_type=destination_type + ) + for quantization_target_point in unified_scale_group: + transformation_layout.register( + self._backend_entity.create_convert_insertion_command(quantization_target_point, parameters) ) + continue + parameters = calculate_quantizer_parameters(unified_values, qconfig, q_group, narrow_range) + commands = self._backend_entity.create_unified_scales_quantizers_insertion_commands( + graph, unified_scale_group, qconfig, parameters + ) + for command in commands: transformation_layout.register(command) - unified_ops_list.add(quantization_target_point) + unified_ops_list.update(unified_scale_group) for quantization_target_point, qconfig in quantization_target_points.items(): if quantization_target_point in unified_ops_list: @@ -914,11 +939,9 @@ def get_statistic_points(self, model: TModel, graph: NNCFGraph) -> StatisticPoin f"Adding target point {quantization_target_point.target_node_name}" f" with type {quantization_target_point.type} for statistics collection" ) - num_samples = self._subset_size - if quantization_target_point.is_weight_target_point(): - # Weight statistics is constant, so 
only one collection is enough. - num_samples = 1 - stat_collector = self._get_stat_collector(graph, quantization_target_point, qconfig, num_samples) + stat_collector = self._get_stat_collector( + graph, quantization_target_point, qconfig, self._batchwise_statistics + ) output.add_statistic_point( StatisticPoint( target_point=quantization_target_point, diff --git a/nncf/quantization/algorithms/min_max/backend.py b/nncf/quantization/algorithms/min_max/backend.py index 7f00e8fc62a..2f2e7b7361d 100644 --- a/nncf/quantization/algorithms/min_max/backend.py +++ b/nncf/quantization/algorithms/min_max/backend.py @@ -11,7 +11,7 @@ from abc import ABC from abc import abstractmethod -from typing import Dict, List, Optional, Set, TypeVar +from typing import Dict, List, Optional, Set, Tuple, TypeVar from nncf.common.graph.graph import NNCFGraph from nncf.common.graph.graph import NNCFNode @@ -20,7 +20,6 @@ from nncf.common.graph.transformations.commands import TargetType from nncf.common.graph.transformations.commands import TransformationCommand from nncf.common.hardware.config import HWConfig -from nncf.common.quantization.initialization.range import RangeInitCollectorParams from nncf.common.quantization.structs import QuantizerConfig from nncf.common.tensor_statistics.collectors import TensorStatisticCollectorBase from nncf.common.tensor_statistics.statistics import MinMaxTensorStatistic @@ -142,12 +141,31 @@ def create_quantizer_insertion_command( Returns backend-specific quantizer insertion command. :param nncf_graph: NNCFGraph to get input/output shapes for the target point. - :param target_point: Target location for the correction. + :param target_point: Target location for the quantizer insertion. :param quantizer_config: QuantizerConfig instance for the current layer. :param parameters: FakeQuantizeParameters to calculate activation quantization parameters. :return: Backend-specific TransformationCommand for the quantizer insertion operation. """ + @staticmethod + @abstractmethod + def create_unified_scales_quantizers_insertion_commands( + nncf_graph: NNCFGraph, + target_points: List[TargetPoint], + quantizer_config: QuantizerConfig, + parameters: FakeQuantizeParameters, + ) -> List[TransformationCommand]: + """ + Returns backend-specific unified scales quantizers insertion commands. + + :param nncf_graph: NNCFGraph to get input/output shapes for the target point. + :param target_points: List of target locations for the quantizers insertion. + :param quantizer_config: QuantizerConfig instance for the current layer. + :param parameters: FakeQuantizeParameters to calculate activation quantization parameters. + :return: List of backend-specific TransformationCommands + for the quantizers with unified scales insertion operations. + """ + @staticmethod @abstractmethod def create_convert_insertion_command( @@ -183,23 +201,46 @@ def unify_statistics(statistics: List[MinMaxTensorStatistic]) -> MinMaxTensorSta :return: Unified MinMaxTensorStatistic value. """ + @staticmethod + @abstractmethod + def get_target_point_shape(nncf_graph: NNCFGraph, node: NNCFNode, target_point: TargetPoint) -> Tuple[int, ...]: + """ + Returns the shape of a target point tensor. + + :param nncf_graph: NNCFGraph instance. + :param node: NNCFNode. + :param target_point: Target point whose tensor shape is sought. + :return: Shape of the target point tensor. 
+ """ + + @staticmethod + @abstractmethod + def get_weight_quantization_axes(node: NNCFNode, target_point: TargetPoint) -> Tuple[int, ...]: + """ + Returns axes for per-channel quantization of weights of the node placed on an input port_id. + + :param node: Quantized node with the weight. + :param target_point: Corresponding target point. + :return: Axes for per-channel quantization of weights. + """ + @staticmethod @abstractmethod def get_statistic_collector( range_estimator_params: RangeEstimatorParameters, - nncf_graph: NNCFGraph, - target_point: TargetPoint, - collector_params: RangeInitCollectorParams, + use_abs_max: bool, + reduction_axes: Optional[Tuple[int, ...]], + aggregation_axes: Optional[Tuple[int, ...]], inplace: bool, - num_samples: int = None, + num_samples: Optional[int] = None, ) -> TensorStatisticCollectorBase: """ Returns backend-specific statistic collector. :param range_estimator_params: Parameters that specify estimators types. - :param nncf_graph: NNCFGraph to get input/output shapes for the target point. - :param target_point: Target location for the correction. - :param collector_params: RangeInitCollectorParams instance for the current layer. + :param use_abs_max: Whether to reduce absolute values of input tensors or not. + :param reduction_axes: Axes for the reducer. + :param aggregation_axes: Axes for the aggregator. :param inplace: Whether to calculate statistic inplace or not. :param num_samples: Maximum number of samples to collect. :return: Backend-specific TensorStatisticCollectorBase for the statistics calculation. diff --git a/nncf/quantization/algorithms/min_max/onnx_backend.py b/nncf/quantization/algorithms/min_max/onnx_backend.py index 6f270aa6683..f58299a5d10 100644 --- a/nncf/quantization/algorithms/min_max/onnx_backend.py +++ b/nncf/quantization/algorithms/min_max/onnx_backend.py @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Dict, List, Optional, Set +from typing import Dict, List, Optional, Set, Tuple import numpy as np @@ -20,16 +20,14 @@ from nncf.common.graph.transformations.commands import TargetType from nncf.common.graph.transformations.commands import TransformationCommand from nncf.common.hardware.config import HWConfig -from nncf.common.quantization.initialization.range import RangeInitCollectorParams from nncf.common.quantization.structs import QuantizerConfig from nncf.experimental.common.tensor_statistics.collectors import AGGREGATORS_MAP from nncf.experimental.common.tensor_statistics.collectors import TensorCollector from nncf.onnx.graph.metatypes import onnx_metatypes as om from nncf.onnx.graph.metatypes.groups import MATMUL_METATYPES from nncf.onnx.graph.node_utils import get_input_edges_mapping -from nncf.onnx.graph.node_utils import get_quantization_axis from nncf.onnx.graph.node_utils import get_quantized_tensor_shape -from nncf.onnx.graph.node_utils import get_reduction_shape +from nncf.onnx.graph.node_utils import get_weight_quantization_axis from nncf.onnx.graph.transformations.commands import ONNXQuantizerInsertionCommand from nncf.onnx.graph.transformations.commands import ONNXTargetPoint from nncf.onnx.hardware.config import ONNXHWConfig @@ -120,16 +118,33 @@ def create_quantizer_insertion_command( target_point: ONNXTargetPoint, quantizer_config: QuantizerConfig, parameters: FakeQuantizeParameters, - ): + ) -> ONNXQuantizerInsertionCommand: tensor_type = np.int8 if np.any(parameters.input_low.data < 0) else np.uint8 - if target_point.is_weight_target_point(): + is_weight = target_point.is_weight_target_point() + if is_weight: tensor_type = np.int8 # The weight is restricted to have only signed range nncf_input_node_next_nodes = ONNXMinMaxAlgoBackend._get_input_edges_mapping(nncf_graph) node = nncf_graph.get_node_by_name(target_point.target_node_name) - axis = get_quantization_axis(quantizer_config.per_channel, node, target_point) + axis = () + if quantizer_config.per_channel: + axis = ONNXMinMaxAlgoBackend.get_weight_quantization_axes(node, target_point) if is_weight else (1,) onnx_parameters = convert_fq_params_to_onnx_params(parameters, quantizer_config.num_bits, tensor_type, axis) return ONNXQuantizerInsertionCommand(target_point, nncf_input_node_next_nodes, onnx_parameters) + @staticmethod + def create_unified_scales_quantizers_insertion_commands( + nncf_graph: NNCFGraph, + target_points: List[ONNXTargetPoint], + quantizer_config: QuantizerConfig, + parameters: FakeQuantizeParameters, + ) -> List[ONNXQuantizerInsertionCommand]: + return [ + ONNXMinMaxAlgoBackend.create_quantizer_insertion_command( + nncf_graph, target_point, quantizer_config, parameters + ) + for target_point in target_points + ] + @staticmethod def create_convert_insertion_command( target_point: ONNXTargetPoint, @@ -153,23 +168,23 @@ def unify_statistics( def _get_input_edges_mapping(nncf_graph: NNCFGraph): return get_input_edges_mapping(nncf_graph) + @staticmethod + def get_target_point_shape(nncf_graph: NNCFGraph, node: NNCFNode, target_point: ONNXTargetPoint) -> Tuple[int, ...]: + return get_quantized_tensor_shape(nncf_graph, node, target_point) + + @staticmethod + def get_weight_quantization_axes(node: NNCFNode, target_point: ONNXTargetPoint) -> Tuple[int]: + return (get_weight_quantization_axis(node, target_point.port_id),) + @staticmethod def get_statistic_collector( range_estimator_params: RangeEstimatorParameters, - nncf_graph: NNCFGraph, - target_point: ONNXTargetPoint, - 
collector_params: RangeInitCollectorParams, + use_abs_max: bool, + reduction_axes: Optional[Tuple[int, ...]], + aggregation_axes: Optional[Tuple[int, ...]], inplace: bool, - num_samples: int = None, + num_samples: Optional[int] = None, ) -> TensorCollector: - is_per_channel = collector_params.is_per_channel - node = nncf_graph.get_node_by_name(target_point.target_node_name) - use_abs_max = collector_params.use_abs_max - quantization_axis = get_quantization_axis(is_per_channel, node, target_point) - quantized_tensor_shape = get_quantized_tensor_shape(nncf_graph, node, target_point) - reduction_axes = None # Per-Tensor - if quantization_axis is not None and quantized_tensor_shape is not None: # Per-Channel - reduction_axes = get_reduction_shape(quantized_tensor_shape, quantization_axis) collector = TensorCollector(ONNXMinMaxTensorStatistic) for params, container_key in zip( [range_estimator_params.min, range_estimator_params.max], @@ -179,29 +194,26 @@ def get_statistic_collector( raise nncf.InternalError( f"Statistic type: {params.statistics_type} is not supported for ONNX PTQ backend yet." ) - if params.aggregator_type not in AGGREGATORS_MAP: raise nncf.InternalError( f"Aggregator type: {params.aggregator_type} is not supported for ONNX PTQ backend yet." ) - - statistic_type = params.statistics_type - kwargs = {"reduction_axes": reduction_axes, "inplace": inplace} - if statistic_type in [StatisticsType.QUANTILE, StatisticsType.ABS_QUANTILE]: - # TODO(dlyakhov): merge two quantile aggregators in one + kwargs = {"reduction_axes": reduction_axes, "inplace": False} + if params.statistics_type in [StatisticsType.QUANTILE, StatisticsType.ABS_QUANTILE]: if container_key == ONNXMinMaxTensorStatistic.MIN_STAT: quantile = params.quantile_outlier_prob else: quantile = 1 - params.quantile_outlier_prob kwargs.update({"quantile": [quantile]}) + # TODO(dlyakhov): merge two quantile aggregators in one + statistic_type = params.statistics_type if use_abs_max and statistic_type == StatisticsType.MAX: statistic_type = StatisticsType.ABS_MAX - reducer = ONNX_REDUCERS_MAP[statistic_type](reduction_axes=reduction_axes) + reducer = ONNX_REDUCERS_MAP[statistic_type](**kwargs) - aggregation_axes = (0,) aggregator = AGGREGATORS_MAP[params.aggregator_type]( - aggregation_axes=aggregation_axes, num_samples=num_samples, + aggregation_axes=aggregation_axes, tensor_processor=ONNXNNCFCollectorTensorProcessor, ) diff --git a/nncf/quantization/algorithms/min_max/openvino_backend.py b/nncf/quantization/algorithms/min_max/openvino_backend.py index 1b258618ee5..417f9c7cbec 100644 --- a/nncf/quantization/algorithms/min_max/openvino_backend.py +++ b/nncf/quantization/algorithms/min_max/openvino_backend.py @@ -19,16 +19,13 @@ from nncf.common.graph.operator_metatypes import OperatorMetatype from nncf.common.graph.transformations.commands import TargetType from nncf.common.hardware.config import HWConfig -from nncf.common.quantization.initialization.range import RangeInitCollectorParams from nncf.common.quantization.structs import QuantizerConfig -from nncf.common.tensor_statistics.collectors import ReductionAxes from nncf.experimental.common.tensor_statistics.collectors import AGGREGATORS_MAP from nncf.experimental.common.tensor_statistics.collectors import TensorCollector from nncf.openvino.graph.layer_attributes import OVLayerAttributes from nncf.openvino.graph.metatypes import openvino_metatypes as om from nncf.openvino.graph.metatypes.groups import OPERATIONS_WITH_WEIGHTS from nncf.openvino.graph.model_utils import 
get_start_nodes_for_activation_path_tracing -from nncf.openvino.graph.node_utils import get_channel_agnostic_reduction_axes from nncf.openvino.graph.node_utils import get_weight_channel_axes from nncf.openvino.graph.transformations.commands import OVConvertInsertionCommand from nncf.openvino.graph.transformations.commands import OVQuantizerInsertionCommand @@ -123,6 +120,15 @@ def create_quantizer_insertion_command( ) -> OVQuantizerInsertionCommand: return OVQuantizerInsertionCommand(target_point, parameters) + @staticmethod + def create_unified_scales_quantizers_insertion_commands( + nncf_graph: NNCFGraph, + target_points: List[OVTargetPoint], + quantizer_config: QuantizerConfig, + parameters: FakeQuantizeParameters, + ) -> List[OVQuantizerInsertionCommand]: + return [OVQuantizerInsertionCommand(target_point, parameters) for target_point in target_points] + @staticmethod def create_convert_insertion_command( target_point: OVTargetPoint, @@ -141,47 +147,28 @@ def unify_statistics(statistics: List[OVMinMaxTensorStatistic]) -> OVMinMaxTenso return OVMinMaxTensorStatistic(min_values=min_values, max_values=max_values) @staticmethod - def _get_reduction_axes( - nncf_graph: NNCFGraph, target_point: OVTargetPoint, collector_params: RangeInitCollectorParams - ) -> Tuple[ReductionAxes, bool]: - if not collector_params.is_per_channel: - return None + def get_target_point_shape(nncf_graph: NNCFGraph, node: NNCFNode, target_point: OVTargetPoint) -> Tuple[int, ...]: + if target_point.is_weight_target_point(): + return node.layer_attributes.constant_attributes[target_point.port_id]["shape"] + if target_point.type == TargetType.PRE_LAYER_OPERATION: + return nncf_graph.get_input_edges(node)[target_point.port_id].tensor_shape + elif target_point.type == TargetType.POST_LAYER_OPERATION: + return nncf_graph.get_output_edges(node)[target_point.port_id].tensor_shape + raise NotImplementedError(f"Unsupported target point type {target_point.type}.") - node = nncf_graph.get_node_by_name(target_point.target_node_name) - if not target_point.is_weight_target_point(): - if target_point.type == TargetType.PRE_LAYER_OPERATION: - shape = nncf_graph.get_input_edges(node)[target_point.port_id].tensor_shape - elif target_point.type == TargetType.POST_LAYER_OPERATION: - shape = nncf_graph.get_output_edges(node)[target_point.port_id].tensor_shape - else: - raise NotImplementedError(f"Unsupported target point type {target_point.type}.") - - # TODO (l-bat): Disable quantizer propagation through layout changing operations - channel_axis = 1 # OpenVINO activations have channel first layout: [N, C, Z, Y, X] - axes = get_channel_agnostic_reduction_axes([channel_axis], shape) - return axes - - assert isinstance(node.layer_attributes, OVLayerAttributes) - const_shape = node.layer_attributes.constant_attributes[target_point.port_id]["shape"] - - if collector_params.is_per_channel: - channel_axes = get_weight_channel_axes(node) - axes = get_channel_agnostic_reduction_axes(channel_axes, const_shape) - else: - axes = tuple(range(len(const_shape))) - return axes + @staticmethod + def get_weight_quantization_axes(node: NNCFNode, target_point: OVTargetPoint) -> Tuple[int]: + return tuple(get_weight_channel_axes(node)) @staticmethod def get_statistic_collector( range_estimator_params: RangeEstimatorParameters, - nncf_graph: NNCFGraph, - target_point: OVTargetPoint, - collector_params: RangeInitCollectorParams, + use_abs_max: bool, + reduction_axes: Optional[Tuple[int, ...]], + aggregation_axes: Optional[Tuple[int, ...]], inplace: bool, - 
num_samples: int = None, + num_samples: Optional[int] = None, ) -> TensorCollector: - reduction_axes = OVMinMaxAlgoBackend._get_reduction_axes(nncf_graph, target_point, collector_params) - collector = TensorCollector(OVMinMaxTensorStatistic) for params, container_key in zip( [range_estimator_params.min, range_estimator_params.max], @@ -191,12 +178,10 @@ def get_statistic_collector( raise nncf.InternalError( f"Statistic type: {params.statistics_type} is not supported for OpenVino PTQ backend yet." ) - if params.aggregator_type not in AGGREGATORS_MAP: raise nncf.InternalError( f"Aggregator type: {params.aggregator_type} is not supported for OpenVino PTQ backend yet." ) - kwargs = {"reduction_axes": reduction_axes, "inplace": inplace} if params.statistics_type in [StatisticsType.QUANTILE, StatisticsType.ABS_QUANTILE]: if container_key == OVMinMaxTensorStatistic.MIN_STAT: @@ -206,12 +191,15 @@ def get_statistic_collector( kwargs.update({"quantile": [quantile]}) # TODO(dlyakhov): merge two quantile aggregators in one statistic_type = params.statistics_type - if collector_params.use_abs_max and statistic_type == StatisticsType.MAX: + if use_abs_max and statistic_type == StatisticsType.MAX: statistic_type = StatisticsType.ABS_MAX reducer = OV_REDUCERS_MAP[statistic_type](**kwargs) - kwargs = {"num_samples": num_samples, "tensor_processor": OVNNCFCollectorTensorProcessor} - aggregator = AGGREGATORS_MAP[params.aggregator_type](**kwargs) + aggregator = AGGREGATORS_MAP[params.aggregator_type]( + num_samples=num_samples, + aggregation_axes=aggregation_axes, + tensor_processor=OVNNCFCollectorTensorProcessor, + ) collector.register_statistic_branch(container_key, reducer, aggregator) return collector diff --git a/nncf/quantization/algorithms/min_max/torch_backend.py b/nncf/quantization/algorithms/min_max/torch_backend.py index 06d1c91e6f1..541792eca78 100644 --- a/nncf/quantization/algorithms/min_max/torch_backend.py +++ b/nncf/quantization/algorithms/min_max/torch_backend.py @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Dict, List, Optional, Set, Tuple +from typing import Dict, List, Optional, Set, Tuple, Union import torch @@ -23,7 +23,6 @@ from nncf.common.graph.transformations.commands import TargetType from nncf.common.graph.transformations.commands import TransformationCommand from nncf.common.hardware.config import HWConfig -from nncf.common.quantization.initialization.range import RangeInitCollectorParams from nncf.common.quantization.structs import QuantizationScheme as QuantizationMode from nncf.common.quantization.structs import QuantizerConfig from nncf.experimental.common.tensor_statistics.collectors import AGGREGATORS_MAP @@ -37,11 +36,13 @@ from nncf.quantization.range_estimator import RangeEstimatorParameters from nncf.torch.graph.graph import PTNNCFGraph from nncf.torch.graph.graph import PTTargetPoint -from nncf.torch.graph.transformations.commands import PTQuantizerInsertionCommand +from nncf.torch.graph.transformations.command_creation import create_quantizer_insertion_command +from nncf.torch.graph.transformations.command_creation import create_shared_quantizer_insertion_command +from nncf.torch.graph.transformations.commands import PTInsertionCommand +from nncf.torch.graph.transformations.commands import PTSharedFnInsertionCommand from nncf.torch.hardware.config import PTHWConfig from nncf.torch.nncf_network import NNCFNetwork from nncf.torch.quantization.default_quantization import DEFAULT_PT_QUANT_TRAIT_TO_OP_DICT -from nncf.torch.quantization.init_range import PTRangeInitCollectorParams from nncf.torch.quantization.layers import QUANTIZATION_MODULES from nncf.torch.quantization.layers import AsymmetricQuantizer from nncf.torch.quantization.layers import BaseQuantizer @@ -130,17 +131,6 @@ def target_point(target_type: TargetType, target_node_name: str, port_id: int) - target_type = PTMinMaxAlgoBackend.TARGET_TYPE_TO_PT_INS_TYPE_MAP[target_type] return PTTargetPoint(target_type, target_node_name, input_port_id=port_id) - @staticmethod - def create_quantizer_insertion_command( - nncf_graph: NNCFGraph, - target_point: PTTargetPoint, - quantizer_config: QuantizerConfig, - parameters: FakeQuantizeParameters, - ) -> PTQuantizerInsertionCommand: - return PTMinMaxAlgoBackend._create_quantizer_insertion_command( - nncf_graph, target_point, quantizer_config, parameters - ) - @staticmethod def create_convert_insertion_command( target_point: PTTargetPoint, @@ -158,19 +148,25 @@ def unify_statistics(statistics: List[PTMinMaxTensorStatistic]) -> PTMinMaxTenso min_values = torch.amin(torch.stack(min_values), dim=0) return PTMinMaxTensorStatistic(min_values=min_values, max_values=max_values) + @staticmethod + def get_target_point_shape(nncf_graph: NNCFGraph, node: NNCFNode, target_point: PTTargetPoint) -> Tuple[int, ...]: + if target_point.is_weight_target_point(): + return tuple(node.layer_attributes.get_weight_shape()) + return nncf_graph.get_input_shape_for_insertion_point(target_point) + + @staticmethod + def get_weight_quantization_axes(node: NNCFNode, target_point: PTTargetPoint) -> Tuple[int]: + return (node.layer_attributes.get_target_dim_for_compression(),) + @staticmethod def get_statistic_collector( range_estimator_params: RangeEstimatorParameters, - nncf_graph: NNCFGraph, - target_point: PTTargetPoint, - collector_params: RangeInitCollectorParams, + use_abs_max: bool, + reduction_axes: Optional[Tuple[int, ...]], + aggregation_axes: Optional[Tuple[int, ...]], inplace: bool, - num_samples: int = None, + num_samples: Optional[int] = None, ) -> TensorCollector: - 
collector_params = PTMinMaxAlgoBackend._default_collector_params(nncf_graph, target_point, collector_params) - reduction_axes = collector_params.get_reduction_axes(per_sample_stats=False) - aggregation_axes = collector_params.get_aggregation_axes(per_sample_stats=False) - collector = TensorCollector(PTMinMaxTensorStatistic) for params, container_key in zip( [range_estimator_params.min, range_estimator_params.max], @@ -195,7 +191,7 @@ def get_statistic_collector( quantile = 1 - params.quantile_outlier_prob reducer = PT_REDUCERS_MAP[statistic_type](reduction_axes=reduction_axes, quantile=[quantile]) else: - if collector_params.use_abs_max and statistic_type == StatisticsType.MAX: + if use_abs_max and statistic_type == StatisticsType.MAX: statistic_type = StatisticsType.ABS_MAX reducer = PT_REDUCERS_MAP[statistic_type](reduction_axes=reduction_axes) @@ -246,21 +242,6 @@ def _get_input_scale_shape( return input_shape, scale_shape, channel_idx - @staticmethod - def _default_collector_params( - nncf_graph: NNCFGraph, target_point: PTTargetPoint, collector_params: RangeInitCollectorParams - ) -> PTRangeInitCollectorParams: - input_shape, _, channel_idx = PTMinMaxAlgoBackend._get_input_scale_shape( - nncf_graph, target_point, collector_params.is_per_channel - ) - return PTRangeInitCollectorParams( - is_weights=collector_params.is_weights, - scheme=collector_params.scheme, - per_channel=collector_params.is_per_channel, - input_shape=input_shape, - channel_idx=channel_idx, - ) - @staticmethod def _create_quantizer( quantizer_config: QuantizerConfig, @@ -301,12 +282,12 @@ def _fill_quantizer_parameters(quantizer: BaseQuantizer, parameters: FakeQuantiz quantizer.scale = torch.nn.Parameter(parameters.input_high.data - quantizer.eps) @staticmethod - def _create_quantizer_insertion_command( + def create_quantizer_insertion_command( nncf_graph: NNCFGraph, target_point: PTTargetPoint, quantizer_config: QuantizerConfig, parameters: FakeQuantizeParameters, - ) -> PTQuantizerInsertionCommand: + ) -> Union[PTInsertionCommand, PTSharedFnInsertionCommand]: _, scale_shape, _ = PTMinMaxAlgoBackend._get_input_scale_shape( nncf_graph, target_point, quantizer_config.per_channel ) @@ -314,7 +295,23 @@ def _create_quantizer_insertion_command( quantizer = PTMinMaxAlgoBackend._create_quantizer( quantizer_config, scale_shape, parameters, target_point.target_type ) - return PTQuantizerInsertionCommand(target_point, quantizer) + return create_quantizer_insertion_command(target_point, quantizer) + + @staticmethod + def create_unified_scales_quantizers_insertion_commands( + nncf_graph: NNCFGraph, + target_points: List[PTTargetPoint], + quantizer_config: QuantizerConfig, + parameters: FakeQuantizeParameters, + ) -> List[PTSharedFnInsertionCommand]: + _, scale_shape, _ = PTMinMaxAlgoBackend._get_input_scale_shape( + nncf_graph, target_points[0], quantizer_config.per_channel + ) + + quantizer = PTMinMaxAlgoBackend._create_quantizer( + quantizer_config, scale_shape, parameters, target_points[0].target_type + ) + return [create_shared_quantizer_insertion_command(target_points, quantizer)] @staticmethod def get_ignored_metatypes(model_type: ModelType, device: TargetDevice) -> List[OperatorMetatype]: diff --git a/nncf/quantization/algorithms/post_training/pipeline.py b/nncf/quantization/algorithms/post_training/pipeline.py index 739442d3646..018f02f5e75 100644 --- a/nncf/quantization/algorithms/post_training/pipeline.py +++ b/nncf/quantization/algorithms/post_training/pipeline.py @@ -116,6 +116,7 @@ def create_ptq_pipeline( 
overflow_fix=advanced_parameters.overflow_fix, quantize_outputs=advanced_parameters.quantize_outputs, inplace_statistics=advanced_parameters.inplace_statistics, + batchwise_statistics=advanced_parameters.batchwise_statistics, activations_quantization_params=advanced_parameters.activations_quantization_params, weights_quantization_params=advanced_parameters.weights_quantization_params, activations_range_estimator_params=advanced_parameters.activations_range_estimator_params, diff --git a/nncf/quantization/algorithms/smooth_quant/algorithm.py b/nncf/quantization/algorithms/smooth_quant/algorithm.py index 3e2994e2bea..d31dd09c451 100644 --- a/nncf/quantization/algorithms/smooth_quant/algorithm.py +++ b/nncf/quantization/algorithms/smooth_quant/algorithm.py @@ -21,6 +21,7 @@ from nncf.common.graph.graph import NNCFNode from nncf.common.graph.operator_metatypes import OperatorMetatype from nncf.common.graph.transformations.layout import TransformationLayout +from nncf.common.graph.utils import get_reduction_axes from nncf.common.logging import nncf_logger from nncf.common.logging.track_progress import track from nncf.common.tensor_statistics.statistic_point import StatisticPoint @@ -369,7 +370,7 @@ def _calculate_input_reduction_axes(self, nncf_graph: NNCFGraph, node: NNCFNode, reduction_axes = tuple([]) if len(shape) > 1: channel_axis = self._backend_entity.get_activation_channel_axis(node, input_port) - reduction_axes = self._backend_entity.get_channel_agnostic_reduction_axes(channel_axis, shape) + reduction_axes = get_reduction_axes((channel_axis,), shape) return reduction_axes def _process_weight_statistics(self, node: NNCFNode, weights: Tensor) -> Tensor: diff --git a/nncf/quantization/algorithms/smooth_quant/backend.py b/nncf/quantization/algorithms/smooth_quant/backend.py index 52d5d1a28d5..11796186163 100644 --- a/nncf/quantization/algorithms/smooth_quant/backend.py +++ b/nncf/quantization/algorithms/smooth_quant/backend.py @@ -97,17 +97,6 @@ def get_activations_port_id(node: NNCFNode, nncf_graph: NNCFGraph) -> int: :return: Map with the activation & weighted ports. """ - @staticmethod - @abstractmethod - def get_channel_agnostic_reduction_axes(channel_axis: int, shape: Tuple[int]) -> Tuple[int]: - """ - Returns filtered reduction axes without axes that corresponds channels. - - :param channel_axes: List of the channel axes. - :param shape: Shape that need to be filtered. - :return: Reduction axes in tuple format. 
- """ - @staticmethod @abstractmethod def get_abs_max_channel_collector( diff --git a/nncf/quantization/algorithms/smooth_quant/openvino_backend.py b/nncf/quantization/algorithms/smooth_quant/openvino_backend.py index 6073a2744cf..d4107f1fa8c 100644 --- a/nncf/quantization/algorithms/smooth_quant/openvino_backend.py +++ b/nncf/quantization/algorithms/smooth_quant/openvino_backend.py @@ -28,7 +28,6 @@ from nncf.openvino.graph.metatypes.groups import QUANTIZE_AGNOSTIC_OPERATIONS from nncf.openvino.graph.metatypes.openvino_metatypes import OVConvolutionMetatype from nncf.openvino.graph.metatypes.openvino_metatypes import OVMatMulMetatype -from nncf.openvino.graph.node_utils import get_channel_agnostic_reduction_axes from nncf.openvino.graph.node_utils import get_weight_value from nncf.openvino.graph.transformations.command_creation import OVCommandCreator from nncf.openvino.graph.transformations.commands import OVMultiplyInsertionCommand @@ -77,10 +76,6 @@ def get_activations_port_id(node: NNCFNode, nncf_graph: NNCFGraph) -> int: raise nncf.InternalError(f"Too many weight or activation ports for {node.node_name} node") return activation_ports[0] - @staticmethod - def get_channel_agnostic_reduction_axes(channel_axis: int, shape: Tuple[int]) -> Tuple[int]: - return get_channel_agnostic_reduction_axes([channel_axis], shape) - @staticmethod def get_abs_max_channel_collector( num_samples: int, stats_reduction_axes: Tuple[int], inplace: bool, branch_key: str diff --git a/nncf/quantization/algorithms/smooth_quant/torch_backend.py b/nncf/quantization/algorithms/smooth_quant/torch_backend.py index a486be98a4f..275f9a2523e 100644 --- a/nncf/quantization/algorithms/smooth_quant/torch_backend.py +++ b/nncf/quantization/algorithms/smooth_quant/torch_backend.py @@ -24,7 +24,6 @@ from nncf.experimental.common.tensor_statistics.collectors import MaxAggregator from nncf.experimental.common.tensor_statistics.collectors import TensorCollector from nncf.experimental.tensor import Tensor -from nncf.openvino.graph.node_utils import get_channel_agnostic_reduction_axes from nncf.openvino.graph.transformations.commands import OVMultiplyInsertionCommand from nncf.openvino.graph.transformations.commands import OVWeightUpdateCommand from nncf.quantization.algorithms.smooth_quant.backend import SmoothQuantAlgoBackend @@ -87,10 +86,6 @@ def get_activations_port_id(node: NNCFNode, nncf_graph: NNCFGraph) -> int: # all nodes with the metatypes have 0 activation port id. 
return 0 - @staticmethod - def get_channel_agnostic_reduction_axes(channel_axis: int, shape: Tuple[int]) -> Tuple[int]: - return get_channel_agnostic_reduction_axes([channel_axis], shape) - @staticmethod def get_abs_max_channel_collector( num_samples: int, stats_reduction_axes: Tuple[int], inplace: bool, branch_key: str diff --git a/nncf/quantization/algorithms/weight_compression/algorithm.py b/nncf/quantization/algorithms/weight_compression/algorithm.py index a2a80fea257..6d354303aa1 100644 --- a/nncf/quantization/algorithms/weight_compression/algorithm.py +++ b/nncf/quantization/algorithms/weight_compression/algorithm.py @@ -310,7 +310,7 @@ def do_compression( weight = self._backend_entity.get_weight(node, weight_port_id, model, graph) if weight.dtype not in [TensorDataType.float32, TensorDataType.float16, TensorDataType.float64]: continue - reduction_axes = self._backend_entity.get_channel_agnostic_reduction_axes(node, weight_port_id, graph) + reduction_axes = self._backend_entity.get_reduction_axes(node, weight_port_id, graph) if ( self._group_size != -1 and self._all_layers diff --git a/nncf/quantization/algorithms/weight_compression/backend.py b/nncf/quantization/algorithms/weight_compression/backend.py index b6d61e7beb4..cfe5564dd11 100644 --- a/nncf/quantization/algorithms/weight_compression/backend.py +++ b/nncf/quantization/algorithms/weight_compression/backend.py @@ -60,9 +60,7 @@ def is_node_with_weights(node: NNCFNode, graph: NNCFGraph) -> bool: @staticmethod @abstractmethod - def get_channel_agnostic_reduction_axes( - node_with_weight: NNCFNode, weight_port_id: int, graph: NNCFGraph - ) -> Optional[Tuple[int]]: + def get_reduction_axes(node_with_weight: NNCFNode, weight_port_id: int, graph: NNCFGraph) -> Optional[Tuple[int]]: """ Returns reduction axes without axes that corresponds to weight channels of the node with weight. 
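The weight-compression backends above now delegate to the shared helper nncf.common.graph.utils.get_reduction_axes. The following is a minimal sketch of its assumed behavior, based only on the example in the per-backend docstring removed earlier in this patch (channel_axis=-2, shape=(1, 3, 2, 4) -> (0, 1, 3)); the function name below is illustrative, not the library implementation.

from typing import Tuple


def get_reduction_axes_sketch(channel_axes: Tuple[int, ...], shape: Tuple[int, ...]) -> Tuple[int, ...]:
    # Assumed behavior: return every tensor axis except the (normalized) channel axes.
    channels = {axis % len(shape) for axis in channel_axes}
    return tuple(axis for axis in range(len(shape)) if axis not in channels)


# Reproduces the example from the removed docstring: channel_axis=-2, shape=(1, 3, 2, 4) -> (0, 1, 3)
assert get_reduction_axes_sketch((-2,), (1, 3, 2, 4)) == (0, 1, 3)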
diff --git a/nncf/quantization/algorithms/weight_compression/openvino_backend.py b/nncf/quantization/algorithms/weight_compression/openvino_backend.py index 87793477dd3..f4d99638f62 100644 --- a/nncf/quantization/algorithms/weight_compression/openvino_backend.py +++ b/nncf/quantization/algorithms/weight_compression/openvino_backend.py @@ -17,11 +17,11 @@ from nncf.common.graph import NNCFNode from nncf.common.graph.operator_metatypes import OperatorMetatype from nncf.common.graph.transformations.commands import TargetType +from nncf.common.graph.utils import get_reduction_axes from nncf.experimental.common.tensor_statistics.collectors import TensorCollector from nncf.experimental.tensor.tensor import Tensor from nncf.openvino.graph.metatypes import openvino_metatypes as om from nncf.openvino.graph.model_transformer import OVModelTransformer -from nncf.openvino.graph.node_utils import get_channel_agnostic_reduction_axes from nncf.openvino.graph.node_utils import get_const_value from nncf.openvino.graph.node_utils import get_weight_channel_axes from nncf.openvino.graph.transformations.commands import OVTargetPoint @@ -61,12 +61,10 @@ def is_node_with_weights(node: NNCFNode, graph: NNCFGraph) -> bool: return node.layer_attributes and node.layer_attributes.constant_attributes @staticmethod - def get_channel_agnostic_reduction_axes( - node_with_weight: NNCFNode, weight_port_id: int, graph: NNCFGraph - ) -> Optional[Tuple[int]]: + def get_reduction_axes(node_with_weight: NNCFNode, weight_port_id: int, graph: NNCFGraph) -> Optional[Tuple[int]]: channel_axes = get_weight_channel_axes(node_with_weight) const_shape = node_with_weight.layer_attributes.constant_attributes[weight_port_id]["shape"] - return get_channel_agnostic_reduction_axes(channel_axes, const_shape) + return get_reduction_axes(channel_axes, const_shape) @staticmethod def target_point(target_type: TargetType, target_node_name: str, port_id: int) -> OVTargetPoint: diff --git a/nncf/quantization/algorithms/weight_compression/torch_backend.py b/nncf/quantization/algorithms/weight_compression/torch_backend.py index 3fc90a5e9c5..ee78b7c5899 100644 --- a/nncf/quantization/algorithms/weight_compression/torch_backend.py +++ b/nncf/quantization/algorithms/weight_compression/torch_backend.py @@ -103,9 +103,7 @@ def get_weight_names_and_port_ids(node: NNCFNode, graph: NNCFGraph) -> List[Tupl return weight_port_ids @staticmethod - def get_channel_agnostic_reduction_axes( - node_with_weight: NNCFNode, weight_port_id: int, graph: NNCFGraph - ) -> Optional[Tuple[int]]: + def get_reduction_axes(node_with_weight: NNCFNode, weight_port_id: int, graph: NNCFGraph) -> Optional[Tuple[int]]: weight_node = get_const_node(node_with_weight, weight_port_id, graph) ndims = len(weight_node.layer_attributes.shape) @@ -199,6 +197,7 @@ def transform_model( # calculates compressed weights and decompression parameters compressed_weight = compress_weight(Tensor(weight), wc_params.reduction_axes, compression_config) + compressed_weight.scale = compressed_weight.scale.astype(dtype=TensorDataType.float16) # pack compressed tensor packed_tensor = compressed_weight.tensor.astype(TensorDataType.uint8) @@ -219,7 +218,9 @@ def transform_model( packed_zero_point = compressed_weight.zero_point.astype(TensorDataType.uint8) # creates weight decompressor - decompressor = WeightsDecompressor(compressed_weight.scale.data, packed_zero_point.data) + decompressor = WeightsDecompressor( + compressed_weight.scale.data, packed_zero_point.data, result_dtype=weight.dtype + ) # registry 
weight decompression module in the model decompressor_name = f"weights_decompressor_{weight_node.node_name.replace('.', '_')}" diff --git a/nncf/quantization/quantize_model.py b/nncf/quantization/quantize_model.py index 6982b347600..fe8a69ace20 100644 --- a/nncf/quantization/quantize_model.py +++ b/nncf/quantization/quantize_model.py @@ -14,6 +14,9 @@ import nncf from nncf.api.compression import TModel from nncf.common.deprecation import warning_deprecated +from nncf.common.graph import NNCFGraph +from nncf.common.graph.operator_metatypes import OperatorMetatype +from nncf.common.logging.logger import nncf_logger from nncf.common.quantization.structs import QuantizationPreset from nncf.common.utils.api_marker import api from nncf.common.utils.backend import BackendType @@ -35,6 +38,56 @@ TTensor = TypeVar("TTensor") +BATCHWISE_STATISTICS_WARNING = ( + "For the particular model the batchwise statistics collection can lead to inaccurate statistics. " + "If the accuracy degradation after compression is unsatisfactory, then " + "the recommendation is to turn off batchwise statistics. If the results are still unsatisfactory, " + "provide a dataloader with batch_size = 1 to the calibration dataset." +) + + +def warning_model_no_batchwise_support( + graph: NNCFGraph, + advanced_quantization_parameters: Optional[AdvancedQuantizationParameters], + model_type: ModelType, + no_batchwise_support_metatypes: List[OperatorMetatype], +) -> None: + """ + Prints the warning message if batchwise statistics could lead to a significant accuracy drop. + + :param graph: Model's NNCFGraph. + :param advanced_quantization_parameters: AdvancedQuantizationParameters. + :param model_type: Model type algorithm option. + :param no_batchwise_support_metatypes: Metatypes having no batchwise statistics support. + """ + if ( + advanced_quantization_parameters + and advanced_quantization_parameters.batchwise_statistics + and (graph.get_nodes_by_metatypes(no_batchwise_support_metatypes) or model_type == ModelType.TRANSFORMER) + ): + nncf_logger.warning(BATCHWISE_STATISTICS_WARNING) + + +def _update_advanced_quantization_parameters( + advanced_parameters: Optional[AdvancedQuantizationParameters], calibration_dataset: Dataset +) -> AdvancedQuantizationParameters: + """ + Updates AdvancedQuantizationParameters depending on batch_size. + + :param advanced_parameters: Advanced quantization parameters for + fine-tuning the quantization algorithm. + :param calibration_dataset: A representative dataset for the + calibration process. + :return: Updated AdvancedQuantizationParameters. + """ + batch_size = calibration_dataset.get_batch_size() + if batch_size is not None and batch_size > 1: + if advanced_parameters is None: + advanced_parameters = AdvancedQuantizationParameters(batchwise_statistics=True) + elif advanced_parameters.batchwise_statistics is None: + advanced_parameters.batchwise_statistics = True + return advanced_parameters + @api(canonical_alias="nncf.quantize") def quantize( @@ -86,10 +139,11 @@ def quantize( :return: The quantized model. :rtype: TModel """ - if subset_size < 1: raise ValueError("Subset size must be positive.") + advanced_parameters = _update_advanced_quantization_parameters(advanced_parameters, calibration_dataset) + backend = get_backend(model) if backend == BackendType.OPENVINO: from nncf.openvino.quantization.quantize_model import quantize_impl @@ -223,6 +277,10 @@ def quantize_with_accuracy_control( :return: The quantized model. 
:rtype: TModel """ + advanced_quantization_parameters = _update_advanced_quantization_parameters( + advanced_quantization_parameters, calibration_dataset + ) + backend = get_backend(model) if backend == BackendType.OPENVINO: from nncf.openvino.quantization.quantize_model import quantize_with_accuracy_control_impl diff --git a/nncf/tensorflow/graph/converter.py b/nncf/tensorflow/graph/converter.py index 83c5d9f25cd..d7eb09a2a21 100644 --- a/nncf/tensorflow/graph/converter.py +++ b/nncf/tensorflow/graph/converter.py @@ -553,8 +553,7 @@ def _collect_edge_information(self): node_name = layer_name input_shapes = self._node_info[node_name]["input_shapes"] - layer_instance_input_port_id = 0 - for inbound_node in inbound_nodes: + for layer_instance_input_port_id, inbound_node in enumerate(inbound_nodes): producer_layer_name, producer_layer_instance, producer_layer_instance_output_port, _ = inbound_node if self._is_layer_shared(producer_layer_name): @@ -573,7 +572,6 @@ def _collect_edge_information(self): "to_node_input_port_id": layer_instance_input_port_id, "from_node_output_port_id": producer_layer_instance_output_port, } - layer_instance_input_port_id += 1 def convert(self) -> NNCFGraph: nncf_graph = NNCFGraph() diff --git a/nncf/tensorflow/graph/metatypes/keras_layers.py b/nncf/tensorflow/graph/metatypes/keras_layers.py index ce65fc63298..8f6636783bb 100644 --- a/nncf/tensorflow/graph/metatypes/keras_layers.py +++ b/nncf/tensorflow/graph/metatypes/keras_layers.py @@ -87,7 +87,7 @@ def get_all_aliases(cls) -> List[str]: return [cls.name] -@KERAS_LAYER_METATYPES.register() +@KERAS_LAYER_METATYPES.register(is_subtype=True) class TFDepthwiseConv1DSubLayerMetatype(TFLayerWithWeightsMetatype): name = "DepthwiseConv1D(Conv1DKerasLayer)" keras_layer_names = ["Conv1D", "Convolution1D"] @@ -112,7 +112,7 @@ class TFConv1DLayerMetatype(TFLayerWithWeightsMetatype): bias_attr_name = "bias" -@KERAS_LAYER_METATYPES.register() +@KERAS_LAYER_METATYPES.register(is_subtype=True) class TFDepthwiseConv2DSubLayerMetatype(TFLayerWithWeightsMetatype): name = "DepthwiseConv2D(Conv2DKerasLayer)" keras_layer_names = ["Conv2D", "Convolution2D"] @@ -137,7 +137,7 @@ class TFConv2DLayerMetatype(TFLayerWithWeightsMetatype): bias_attr_name = "bias" -@KERAS_LAYER_METATYPES.register() +@KERAS_LAYER_METATYPES.register(is_subtype=True) class TFDepthwiseConv3DSubLayerMetatype(TFLayerWithWeightsMetatype): name = "DepthwiseConv3D(Conv3DKerasLayer)" keras_layer_names = ["Conv3D", "Convolution3D"] diff --git a/nncf/torch/dynamic_graph/layer_attributes_handlers.py b/nncf/torch/dynamic_graph/layer_attributes_handlers.py index bc3c809ecc2..44eec3e0d3d 100644 --- a/nncf/torch/dynamic_graph/layer_attributes_handlers.py +++ b/nncf/torch/dynamic_graph/layer_attributes_handlers.py @@ -175,13 +175,18 @@ def apply_args_defaults( :return: A dictionary combining arguments from `args` and `kwargs` according to the `args_signature`. 
""" - # Manual defines function signature neccecery because inspection of torch function is not available + # Manual defines function signature necessary because inspection of torch function is not available # https://github.com/pytorch/pytorch/issues/74539 args_dict: Dict[str, Any] = dict() for idx, arg_desc in enumerate(args_signature): if isinstance(arg_desc, str): - args_dict[arg_desc] = kwargs.get(arg_desc, args[idx]) + if arg_desc in kwargs: + args_dict[arg_desc] = kwargs[arg_desc] + elif idx < len(args): + args_dict[arg_desc] = args[idx] + else: + raise ValueError("Incorrect args_signature, can not by applied to function arguments.") elif isinstance(arg_desc, Tuple): arg_name, default = arg_desc args_dict[arg_name] = kwargs.get(arg_name, args[idx] if idx < len(args) else default) diff --git a/nncf/torch/external_hook.py b/nncf/torch/external_hook.py index 7983e74da7e..60902afbbe2 100644 --- a/nncf/torch/external_hook.py +++ b/nncf/torch/external_hook.py @@ -11,7 +11,7 @@ from typing import Any -from nncf.torch.dynamic_graph.context import TracingContext +from nncf.torch.dynamic_graph.context import get_current_context EXTERNAL_OP_STORAGE_NAME = "external_op" @@ -26,17 +26,15 @@ class ExternalOpCallHook: the base module execution. """ - def __init__(self, storage_name: str, context: TracingContext, storage_key: str): + def __init__(self, storage_name: str, storage_key: str): """ :param storage_name: Attribute name of a model NNCFInterface. - :param context: Current tracing context. :param storage_key: Key to retrieve callable hook """ self._storage_name = storage_name - self._compressed_context = context self._storage_key = storage_key def __call__(self, *args: Any, **kwargs) -> Any: - replica = self._compressed_context.base_module_thread_local_replica + replica = get_current_context().base_module_thread_local_replica storage = getattr(replica.nncf, self._storage_name) return storage[self._storage_key](*args, **kwargs) diff --git a/nncf/torch/graph/operator_metatypes.py b/nncf/torch/graph/operator_metatypes.py index 7171d7fc732..b99bab5ebab 100644 --- a/nncf/torch/graph/operator_metatypes.py +++ b/nncf/torch/graph/operator_metatypes.py @@ -167,7 +167,7 @@ class PTNoopMetatype(PTOperatorMetatype): } -@PT_OPERATOR_METATYPES.register() +@PT_OPERATOR_METATYPES.register(is_subtype=True) class PTDepthwiseConv1dSubtype(PTDepthwiseConvOperatorSubtype): name = "Conv1DOp" hw_config_name = [HWConfigOpName.DEPTHWISECONVOLUTION] @@ -178,7 +178,7 @@ class PTDepthwiseConv1dSubtype(PTDepthwiseConvOperatorSubtype): bias_port_id = 2 -@PT_OPERATOR_METATYPES.register() +@PT_OPERATOR_METATYPES.register(is_subtype=True) class PTModuleConv1dMetatype(PTModuleOperatorSubtype): name = "Conv1DOp" hw_config_names = [HWConfigOpName.CONVOLUTION] @@ -202,7 +202,7 @@ class PTConv1dMetatype(PTOperatorMetatype): bias_port_id = 2 -@PT_OPERATOR_METATYPES.register() +@PT_OPERATOR_METATYPES.register(is_subtype=True) class PTDepthwiseConv2dSubtype(PTDepthwiseConvOperatorSubtype): name = "Conv2DOp" hw_config_names = [HWConfigOpName.DEPTHWISECONVOLUTION] @@ -213,7 +213,7 @@ class PTDepthwiseConv2dSubtype(PTDepthwiseConvOperatorSubtype): bias_port_id = 2 -@PT_OPERATOR_METATYPES.register() +@PT_OPERATOR_METATYPES.register(is_subtype=True) class PTModuleConv2dMetatype(PTModuleOperatorSubtype): name = "Conv2DOp" hw_config_names = [HWConfigOpName.CONVOLUTION] @@ -237,7 +237,7 @@ class PTConv2dMetatype(PTOperatorMetatype): bias_port_id = 2 -@PT_OPERATOR_METATYPES.register() +@PT_OPERATOR_METATYPES.register(is_subtype=True) class 
PTDepthwiseConv3dSubtype(PTDepthwiseConvOperatorSubtype): name = "Conv3DOp" hw_config_names = [HWConfigOpName.DEPTHWISECONVOLUTION] @@ -248,7 +248,7 @@ class PTDepthwiseConv3dSubtype(PTDepthwiseConvOperatorSubtype): bias_port_id = 2 -@PT_OPERATOR_METATYPES.register() +@PT_OPERATOR_METATYPES.register(is_subtype=True) class PTModuleConv3dMetatype(PTModuleOperatorSubtype): name = "Conv3DOp" hw_config_names = [HWConfigOpName.CONVOLUTION] @@ -272,7 +272,7 @@ class PTConv3dMetatype(PTOperatorMetatype): bias_port_id = 2 -@PT_OPERATOR_METATYPES.register() +@PT_OPERATOR_METATYPES.register(is_subtype=True) class PTModuleConvTranspose1dMetatype(PTModuleOperatorSubtype): name = "ConvTranspose1DOp" hw_config_names = [HWConfigOpName.CONVOLUTION] @@ -295,7 +295,7 @@ class PTConvTranspose1dMetatype(PTOperatorMetatype): bias_port_id = 2 -@PT_OPERATOR_METATYPES.register() +@PT_OPERATOR_METATYPES.register(is_subtype=True) class PTModuleConvTranspose2dMetatype(PTModuleOperatorSubtype): name = "ConvTranspose2DOp" hw_config_names = [HWConfigOpName.CONVOLUTION] @@ -318,7 +318,7 @@ class PTConvTranspose2dMetatype(PTOperatorMetatype): bias_port_id = 2 -@PT_OPERATOR_METATYPES.register() +@PT_OPERATOR_METATYPES.register(is_subtype=True) class PTModuleConvTranspose3dMetatype(PTModuleOperatorSubtype): name = "ConvTranspose3DOp" hw_config_names = [HWConfigOpName.CONVOLUTION] @@ -341,7 +341,7 @@ class PTConvTranspose3dMetatype(PTOperatorMetatype): bias_port_id = 2 -@PT_OPERATOR_METATYPES.register() +@PT_OPERATOR_METATYPES.register(is_subtype=True) class PTModuleDeformConv2dMetatype(PTModuleOperatorSubtype): name = "DeformConv2dOp" module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["deform_conv2d"]} @@ -358,7 +358,7 @@ class PTDeformConv2dMetatype(PTOperatorMetatype): weight_port_ids = [2] -@PT_OPERATOR_METATYPES.register() +@PT_OPERATOR_METATYPES.register(is_subtype=True) class PTModuleLinearMetatype(PTModuleOperatorSubtype): name = "LinearOp" module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["linear"]} @@ -428,7 +428,7 @@ class PTLeakyRELUMetatype(PTOperatorMetatype): num_expected_input_edges = 1 -@PT_OPERATOR_METATYPES.register() +@PT_OPERATOR_METATYPES.register(is_subtype=True) class PTModuleLayerNormMetatype(PTModuleOperatorSubtype): name = "LayerNormOp" module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["layer_norm"]} @@ -445,7 +445,7 @@ class PTLayerNormMetatype(PTOperatorMetatype): num_expected_input_edges = 1 -@PT_OPERATOR_METATYPES.register() +@PT_OPERATOR_METATYPES.register(is_subtype=True) class PTModuleGroupNormMetatype(PTModuleOperatorSubtype): name = "GroupNormOp" module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["group_norm"]} @@ -630,7 +630,7 @@ class PTThresholdMetatype(PTOperatorMetatype): module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["threshold"]} -@PT_OPERATOR_METATYPES.register() +@PT_OPERATOR_METATYPES.register(is_subtype=True) class PTModuleBatchNormMetatype(PTModuleOperatorSubtype): name = "BatchNormOp" module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["batch_norm"]} @@ -821,7 +821,7 @@ class PTExpandAsMetatype(PTOperatorMetatype): module_to_function_names = {NamespaceTarget.TORCH_TENSOR: ["expand_as"]} -@PT_OPERATOR_METATYPES.register() +@PT_OPERATOR_METATYPES.register(is_subtype=True) class PTModuleEmbeddingMetatype(PTModuleOperatorSubtype): name = "EmbeddingOp" module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["embedding"]} @@ -838,7 +838,7 @@ class 
PTEmbeddingMetatype(PTOperatorMetatype): weight_port_ids = [1] -@PT_OPERATOR_METATYPES.register() +@PT_OPERATOR_METATYPES.register(is_subtype=True) class PTModuleEmbeddingBagMetatype(PTModuleOperatorSubtype): name = "EmbeddingBagOp" module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["embedding_bag"]} @@ -1063,3 +1063,14 @@ def get_operator_metatypes() -> List[Type[OperatorMetatype]]: ] QUANTIZE_NODE_TYPES = ["symmetric_quantize", "asymmetric_quantize"] + +# These metatypes mix outputs for different samples into one axis. +# If reducers and aggregators collect statistics at the output of the following operations, +# assuming that 0-axis is batch axis, they get only 1 value instead of batch_size values. +# It could lead to inaccurate/incorrect statistics result. +OPERATIONS_OUTPUT_HAS_NO_BATCH_AXIS = [ + PTEmbeddingMetatype, + PTEmbeddingBagMetatype, + PTModuleEmbeddingBagMetatype, + PTModuleEmbeddingMetatype, +] diff --git a/nncf/torch/graph/transformations/command_creation.py b/nncf/torch/graph/transformations/command_creation.py index ac52802c039..6146803ae19 100644 --- a/nncf/torch/graph/transformations/command_creation.py +++ b/nncf/torch/graph/transformations/command_creation.py @@ -9,13 +9,21 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import List, Union + from torch import Tensor from nncf.common.graph.graph import NNCFNode from nncf.common.graph.transformations.commands import TargetType +from nncf.common.graph.transformations.commands import TransformationPriority +from nncf.common.quantization.structs import NonWeightQuantizerId +from nncf.torch.graph.transformations.commands import ExtraCompressionModuleType from nncf.torch.graph.transformations.commands import PTBiasCorrectionCommand +from nncf.torch.graph.transformations.commands import PTInsertionCommand +from nncf.torch.graph.transformations.commands import PTSharedFnInsertionCommand from nncf.torch.graph.transformations.commands import PTTargetPoint from nncf.torch.graph.transformations.commands import PTWeightUpdateCommand +from nncf.torch.quantization.layers import BaseQuantizer def create_bias_correction_command(node: NNCFNode, bias_value: Tensor) -> PTBiasCorrectionCommand: @@ -40,3 +48,37 @@ def create_command_to_update_weight(node: NNCFNode, weight_value: Tensor) -> PTW """ target_point = PTTargetPoint(TargetType.LAYER, node.node_name) return PTWeightUpdateCommand(target_point, weight_value) + + +def create_quantizer_insertion_command( + target_point: PTTargetPoint, quantizer: BaseQuantizer +) -> Union[PTInsertionCommand, PTSharedFnInsertionCommand]: + if target_point.type is TargetType.OPERATION_WITH_WEIGHTS: + return PTInsertionCommand(target_point, quantizer, TransformationPriority.QUANTIZATION_PRIORITY) + + quantizer_id = NonWeightQuantizerId(target_point.target_node_name, target_point.input_port_id) + storage_key = str(quantizer_id) + return PTSharedFnInsertionCommand( + target_points=[target_point], + fn=quantizer, + op_unique_name=storage_key, + compression_module_type=ExtraCompressionModuleType.EXTERNAL_QUANTIZER, + priority=TransformationPriority.QUANTIZATION_PRIORITY, + ) + + +def create_shared_quantizer_insertion_command( + target_points: List[PTTargetPoint], quantizer: BaseQuantizer +) -> PTSharedFnInsertionCommand: + quantizers_ids = [] + for target_point in target_points: + quantizers_ids.append(NonWeightQuantizerId(target_point.target_node_name, target_point.input_port_id)) + + storage_key = 
";".join(str(quantizer_id) for quantizer_id in sorted(quantizers_ids, key=str)) + return PTSharedFnInsertionCommand( + target_points=target_points, + fn=quantizer, + op_unique_name=storage_key, + compression_module_type=ExtraCompressionModuleType.EXTERNAL_QUANTIZER, + priority=TransformationPriority.QUANTIZATION_PRIORITY, + ) diff --git a/nncf/torch/graph/transformations/commands.py b/nncf/torch/graph/transformations/commands.py index c7793f27a28..b2461277a5f 100644 --- a/nncf/torch/graph/transformations/commands.py +++ b/nncf/torch/graph/transformations/commands.py @@ -9,6 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from enum import Enum from typing import Any, Callable, Dict, List import torch @@ -56,6 +57,7 @@ def __eq__(self, other: "PTTargetPoint"): isinstance(other, PTTargetPoint) and self.target_type == other.target_type and self.target_node_name == other.target_node_name + and self.input_port_id == other.input_port_id ) def __str__(self): @@ -150,12 +152,18 @@ def requires_graph_rebuild(self): return self.priority == TransformationPriority.QUANTIZATION_PRIORITY +class ExtraCompressionModuleType(Enum): + EXTERNAL_QUANTIZER = 0 + EXTERNAL_OP = 1 + + class PTSharedFnInsertionCommand(PTTransformationCommand): def __init__( self, target_points: List[PTTargetPoint], fn: Callable, op_unique_name: str, + compression_module_type: ExtraCompressionModuleType = ExtraCompressionModuleType.EXTERNAL_OP, priority: TransformationPriority = TransformationPriority.DEFAULT_PRIORITY, hooks_group_name: str = DEFAULT_HOOKS_GROUP_NAME, ): @@ -163,6 +171,7 @@ def __init__( self.target_points = target_points self.fn = fn self.op_name = op_unique_name + self.compression_module_type = compression_module_type self.priority = priority self.hooks_group_name = hooks_group_name @@ -170,25 +179,6 @@ def requires_graph_rebuild(self): return True -class PTQuantizerInsertionCommand(PTTransformationCommand): - """ - Insertion quantizer operation to the models. - """ - - def __init__( - self, - point: PTTargetPoint, - quantizer: "BaseQuantizer", # noqa: F821 - hooks_group_name: str = DEFAULT_HOOKS_GROUP_NAME, - ): - super().__init__(TransformationType.INSERT, point) - self.quantizer = quantizer - self.hooks_group_name = hooks_group_name - - def requires_graph_rebuild(self): - return True - - class PTModelExtractionWithFusedBiasCommand(PTCommand): """ Extracts sequence by name with node that contain fused bias. 
diff --git a/nncf/torch/model_graph_manager.py b/nncf/torch/model_graph_manager.py index 83a0f02b6d4..ad788520705 100644 --- a/nncf/torch/model_graph_manager.py +++ b/nncf/torch/model_graph_manager.py @@ -18,10 +18,11 @@ from nncf.common.graph.graph import NNCFNode from nncf.common.graph.operator_metatypes import CONST_NOOP_METATYPES from nncf.torch.dynamic_graph.context import PreHookId +from nncf.torch.external_hook import ExternalOpCallHook from nncf.torch.graph import operator_metatypes as om from nncf.torch.nncf_network import NNCFNetwork -from nncf.torch.quantization.external_quantizer import ExternalQuantizerCallHook from nncf.torch.quantization.layers import AsymmetricQuantizer +from nncf.torch.quantization.layers import BaseQuantizer from nncf.torch.quantization.layers import SymmetricQuantizer CONV_META_TYPES = [ @@ -295,7 +296,9 @@ def get_fake_quantizer( hook_container = model.nncf._compressed_context._post_hooks.get(op_addr, {}) for call_hook in hook_container.values(): - if isinstance(call_hook, ExternalQuantizerCallHook): + if isinstance(call_hook, ExternalOpCallHook): storage = getattr(model.nncf, call_hook._storage_name) - return storage[call_hook._storage_key] + module = storage[call_hook._storage_key] + if isinstance(module, BaseQuantizer): + return module return None diff --git a/nncf/torch/model_transformer.py b/nncf/torch/model_transformer.py index 88f1cc101df..19c2c647b05 100644 --- a/nncf/torch/model_transformer.py +++ b/nncf/torch/model_transformer.py @@ -11,8 +11,10 @@ import copy from collections import defaultdict -from typing import Callable, Dict, List, Tuple +from functools import partial +from typing import Callable, Dict, List, Optional, Tuple +import torch from torch import Tensor from torch import nn from torch.nn.parameter import Parameter @@ -20,23 +22,20 @@ from nncf.common.graph.model_transformer import ModelTransformer from nncf.common.graph.transformations.commands import TargetType from nncf.common.graph.transformations.commands import TransformationPriority -from nncf.common.quantization.structs import NonWeightQuantizerId -from nncf.torch.external_hook import EXTERNAL_OP_STORAGE_NAME +from nncf.torch.graph.transformations.commands import ExtraCompressionModuleType from nncf.torch.graph.transformations.commands import PTBiasCorrectionCommand from nncf.torch.graph.transformations.commands import PTInsertionCommand from nncf.torch.graph.transformations.commands import PTModelExtractionWithFusedBiasCommand -from nncf.torch.graph.transformations.commands import PTQuantizerInsertionCommand from nncf.torch.graph.transformations.commands import PTSharedFnInsertionCommand from nncf.torch.graph.transformations.commands import PTTargetPoint from nncf.torch.graph.transformations.commands import PTWeightUpdateCommand from nncf.torch.graph.transformations.layout import PTTransformationLayout from nncf.torch.model_analyzer import get_potential_fused_node from nncf.torch.module_operations import UpdateWeight -from nncf.torch.nncf_network import ExtraCompressionModuleType from nncf.torch.nncf_network import NNCFNetwork from nncf.torch.nncf_network import PTInsertionPoint +from nncf.torch.nncf_network import compression_module_type_to_attr_name from nncf.torch.quantization.external_quantizer import ExternalOpCallHook -from nncf.torch.quantization.external_quantizer import ExternalQuantizerCallHook from nncf.torch.utils import get_model_device from nncf.torch.utils import is_multidevice @@ -49,12 +48,15 @@ class PTModelTransformer(ModelTransformer): def 
__init__(self, model: NNCFNetwork): super().__init__(model) + device = None + if not is_multidevice(model): + device = get_model_device(model) + self._command_transformation_ordered_pairs = [ (PTModelExtractionWithFusedBiasCommand, self._apply_extraction_with_fused_bias_transformations), - (PTInsertionCommand, self._apply_insertion_transformations), - (PTQuantizerInsertionCommand, self._apply_quantizer_insertion_transformations), + (PTInsertionCommand, partial(self._apply_insertion_transformations, device=device)), + (PTSharedFnInsertionCommand, partial(self._apply_shared_nodes_insertion, device=device)), (PTBiasCorrectionCommand, self._apply_bias_correction_transformations), - (PTSharedFnInsertionCommand, self._apply_shared_nodes_insertion), (PTWeightUpdateCommand, self._apply_weights_update_transformations), ] @@ -78,12 +80,16 @@ def transform(self, transformation_layout: PTTransformationLayout) -> NNCFNetwor return model @staticmethod - def _apply_insertion_transformations(model: NNCFNetwork, transformations: List[PTInsertionCommand]) -> NNCFNetwork: + def _apply_insertion_transformations( + model: NNCFNetwork, transformations: List[PTInsertionCommand], device: Optional[torch.device] + ) -> NNCFNetwork: """ Applies insertion transformations to the model. :param model: Model to apply transformations. :param transformations: List of the bias correction transformations. + :param device: Target device for the insertion functions. Applies only to + functions which are instances of torch.nn.Module. Does nothing if device is None. :return: A modified NNCFNetwork. """ node_to_op_address_mapping = model.nncf.get_node_to_op_address_mapping() @@ -98,7 +104,11 @@ def _apply_insertion_transformations(model: NNCFNetwork, transformations: List[P input_port_id=target_point.input_port_id, replaced_modules=model.nncf.replace_modules, ) + fn = transformation_command.fn + if device is not None and isinstance(fn, torch.nn.Module): + fn.to(device) + + if model.nncf.replace_modules and target_point.type is TargetType.OPERATION_WITH_WEIGHTS: fn = UpdateWeight(fn) tup = (fn, transformation_command) @@ -113,21 +123,63 @@ def _apply_insertion_transformations(model: NNCFNetwork, transformations: List[P @staticmethod def _apply_shared_nodes_insertion( - model: NNCFNetwork, transformations: List[PTSharedFnInsertionCommand] + model: NNCFNetwork, + transformations: List[PTSharedFnInsertionCommand], + device: Optional[torch.device], ) -> NNCFNetwork: - compression_model_type = ExtraCompressionModuleType.EXTERNAL_OP + """ + Applies insertion of PTSharedFnInsertionCommand commands. For each command, the method inserts + a torch module into the NNCFNetwork and inserts call hooks for each of the command's target points. + + :param model: Model to apply transformations. + :param transformations: List of the shared function insertion transformations. + :param device: Target device for the insertion functions. Applies only to + functions which are instances of torch.nn.Module. Does nothing if device is None. + :return: A modified NNCFNetwork. 
+ """ + compression_type_vs_transformations = defaultdict(list) + for transformation in transformations: + compression_type_vs_transformations[transformation.compression_module_type].append(transformation) + + for compression_module_type, transformations in compression_type_vs_transformations.items(): + model = PTModelTransformer._apply_shared_node_insertion_with_compression_type( + model, transformations, device, compression_module_type + ) + return model + + @staticmethod + def _apply_shared_node_insertion_with_compression_type( + model: NNCFNetwork, + transformations: List[PTSharedFnInsertionCommand], + device: Optional[torch.device], + compression_module_type: ExtraCompressionModuleType, + ): + """ + Does _apply_shared_nodes_insertion with specified compression model type which will be + used for each transformation command. - if not model.nncf.is_compression_module_registered(compression_model_type): - model.nncf.register_compression_module_type(compression_model_type) + :param model: Model to apply transformations. + :param transformations: List of the bias correction transformations. + :param device: Target device for the insertion functions. Applies only to + functions which are subclassed from torch.nn.Module. Do nothing in case device is None. + :param compression_module_type: Common compression module type for all commands. + :return: A modified NNCFNetwork. + """ + if not model.nncf.is_compression_module_registered(compression_module_type): + model.nncf.register_compression_module_type(compression_module_type) insertion_commands: List[PTInsertionCommand] = [] for shared_command in transformations: - model.nncf.add_compression_module(shared_command.op_name, shared_command.fn, compression_model_type) + fn = shared_command.fn + if device is not None: + fn.to(device) + + model.nncf.add_compression_module(shared_command.op_name, fn, compression_module_type) for target_point in shared_command.target_points: fn = ExternalOpCallHook( - EXTERNAL_OP_STORAGE_NAME, model.nncf.get_tracing_context(), shared_command.op_name + compression_module_type_to_attr_name(compression_module_type), shared_command.op_name ) insertion_commands.append( PTInsertionCommand( @@ -138,47 +190,7 @@ def _apply_shared_nodes_insertion( ) ) - return PTModelTransformer._apply_insertion_transformations(model, insertion_commands) - - @staticmethod - def _apply_quantizer_insertion_transformations( - model: NNCFNetwork, transformations: List[PTQuantizerInsertionCommand] - ) -> NNCFNetwork: - """ - Applies quantizer insertion transformations on the model. - - :param model: Model to apply transformations. - :param transformations: List of the OVQuantizerInsertionCommand transformations. - :return: Model with inserted FakeQuantize nodes. 
- """ - compression_model_type = ExtraCompressionModuleType.EXTERNAL_QUANTIZER - - if not model.nncf.is_compression_module_registered(compression_model_type): - model.nncf.register_compression_module_type(compression_model_type) - - insertion_commands: List[PTInsertionCommand] = [] - device = None - if not is_multidevice(model): - device = get_model_device(model) - - for transformation_command in transformations: - target_point: PTTargetPoint = transformation_command.target_point - quantizer_module = transformation_command.quantizer - if device is not None: - quantizer_module = quantizer_module.to(device) - fn = quantizer_module - - if target_point.type is not TargetType.OPERATION_WITH_WEIGHTS: - quantizer_id = NonWeightQuantizerId(target_point.target_node_name, target_point.input_port_id) - storage_key = str(quantizer_id) - model.nncf.add_compression_module(storage_key, quantizer_module, compression_model_type) - fn = ExternalQuantizerCallHook(model.nncf.get_tracing_context(), storage_key) - - insertion_commands.append( - PTInsertionCommand(target_point, fn, TransformationPriority.QUANTIZATION_PRIORITY) - ) - - return PTModelTransformer._apply_insertion_transformations(model, insertion_commands) + return PTModelTransformer._apply_insertion_transformations(model, insertion_commands, device) @staticmethod def _apply_extraction_with_fused_bias_transformations( diff --git a/nncf/torch/nested_objects_traversal.py b/nncf/torch/nested_objects_traversal.py index f8b7f942e7d..1507b3ade12 100644 --- a/nncf/torch/nested_objects_traversal.py +++ b/nncf/torch/nested_objects_traversal.py @@ -28,7 +28,7 @@ def is_tuple(obj) -> bool: def is_named_tuple(obj) -> bool: - return is_tuple(obj) and (obj.__class__ != tuple) + return is_tuple(obj) and (obj.__class__ is not tuple) def maybe_get_iterator(obj): diff --git a/nncf/torch/nncf_network.py b/nncf/torch/nncf_network.py index 5494e3a5620..a27d338a77a 100644 --- a/nncf/torch/nncf_network.py +++ b/nncf/torch/nncf_network.py @@ -17,7 +17,6 @@ from contextlib import contextmanager from copy import deepcopy from dataclasses import dataclass -from enum import Enum from enum import IntEnum from typing import Callable, Dict, Iterator, List, Optional, Tuple, TypeVar @@ -67,6 +66,7 @@ from nncf.torch.graph.operator_metatypes import OPERATORS_WITH_WEIGHTS_METATYPES from nncf.torch.graph.operator_metatypes import PTSplitMetatype from nncf.torch.graph.transformations.commands import DEFAULT_HOOKS_GROUP_NAME +from nncf.torch.graph.transformations.commands import ExtraCompressionModuleType from nncf.torch.graph.transformations.commands import PTTargetPoint from nncf.torch.knowledge_distillation.knowledge_distillation_handler import KnowledgeDistillationLossHandler from nncf.torch.layer_utils import _NNCFModuleMixin @@ -142,11 +142,6 @@ def __hash__(self): return hash(str(self)) -class ExtraCompressionModuleType(Enum): - EXTERNAL_QUANTIZER = 0 - EXTERNAL_OP = 1 - - @dataclass class PTGraphPair: """ @@ -576,7 +571,7 @@ def is_scope_in_nncf_module_scope(self, scope: Scope) -> bool: return False def register_compression_module_type(self, compression_module_type: ExtraCompressionModuleType): - attr_name = self._compression_module_type_to_attr_name(compression_module_type) + attr_name = compression_module_type_to_attr_name(compression_module_type) if compression_module_type in self._extra_module_types: raise nncf.ValidationError(f"Module type {compression_module_type} is already registered") @@ -586,7 +581,7 @@ def register_compression_module_type(self, 
compression_module_type: ExtraCompres def add_compression_module( self, module_key: str, module: nn.Module, compression_module_type: ExtraCompressionModuleType ): - attr_name = self._compression_module_type_to_attr_name(compression_module_type) + attr_name = compression_module_type_to_attr_name(compression_module_type) if compression_module_type not in self._extra_module_types: raise nncf.InternalError(f"Module type {compression_module_type} was not registered") storage = self.__getattr__(attr_name) @@ -595,7 +590,7 @@ def add_compression_module( storage[module_key] = module def get_compression_modules_by_type(self, compression_module_type: ExtraCompressionModuleType) -> nn.ModuleDict: - attr_name = self._compression_module_type_to_attr_name(compression_module_type) + attr_name = compression_module_type_to_attr_name(compression_module_type) if compression_module_type not in self._extra_module_types: raise nncf.InternalError(f"Module type {compression_module_type} was not registered") return self.__getattr__(attr_name) @@ -609,20 +604,8 @@ def is_compression_module_registered(self, compression_module_type: ExtraCompres """ return compression_module_type in self._extra_module_types - @staticmethod - def _compression_module_type_to_attr_name(compression_module_type: ExtraCompressionModuleType): - """ - Required for backward compatibility with checkpoints that store function and activation - quantizers directly under corresponding attributes of NNCFNetwork. - """ - if compression_module_type == ExtraCompressionModuleType.EXTERNAL_QUANTIZER: - return EXTERNAL_QUANTIZERS_STORAGE_NAME - if compression_module_type == ExtraCompressionModuleType.EXTERNAL_OP: - return EXTERNAL_OP_STORAGE_NAME - raise nncf.ValidationError("Unknown extra module type") - def sort_compression_modules(self, compression_module_type: ExtraCompressionModuleType): - attr_name = self._compression_module_type_to_attr_name(compression_module_type) + attr_name = compression_module_type_to_attr_name(compression_module_type) if compression_module_type not in self._extra_module_types: raise nncf.InternalError("Module type {} was not registered".format(compression_module_type)) module_dict = self.__getattr__(attr_name) @@ -1137,3 +1120,15 @@ def hook_fn( def close(self): self.hook.remove() + + +def compression_module_type_to_attr_name(compression_module_type: ExtraCompressionModuleType): + """ + Required for backward compatibility with checkpoints that store function and activation + quantizers directly under corresponding attributes of NNCFNetwork. 
+ """ + if compression_module_type == ExtraCompressionModuleType.EXTERNAL_QUANTIZER: + return EXTERNAL_QUANTIZERS_STORAGE_NAME + if compression_module_type == ExtraCompressionModuleType.EXTERNAL_OP: + return EXTERNAL_OP_STORAGE_NAME + raise nncf.ValidationError("Unknown extra module type") diff --git a/nncf/torch/pruning/filter_pruning/global_ranking/evolutionary_optimization.py b/nncf/torch/pruning/filter_pruning/global_ranking/evolutionary_optimization.py index def4266cbd9..9d9d69a8de4 100644 --- a/nncf/torch/pruning/filter_pruning/global_ranking/evolutionary_optimization.py +++ b/nncf/torch/pruning/filter_pruning/global_ranking/evolutionary_optimization.py @@ -20,6 +20,11 @@ from torch import optim from nncf.config.config import NNCFConfig +from nncf.config.schemata.defaults import PRUNING_LEGR_GENERATIONS +from nncf.config.schemata.defaults import PRUNING_LEGR_MUTATE_PERCENT +from nncf.config.schemata.defaults import PRUNING_LEGR_NUM_SAMPLES +from nncf.config.schemata.defaults import PRUNING_LEGR_POPULATION_SIZE +from nncf.config.schemata.defaults import PRUNING_LEGR_SIGMA_SCALE from nncf.torch.utils import get_filters_num @@ -48,11 +53,11 @@ def __init__(self, initial_filter_norms: Dict, hparams: Dict, random_seed: int): """ self.random_seed = random_seed # Optimizer hyper-params - self.population_size = hparams.get("population_size", 64) - self.num_generations = hparams.get("num_generations", 400) - self.num_samples = hparams.get("num_samples", 16) - self.mutate_percent = hparams.get("mutate_percent", 0.1) - self.scale_sigma = hparams.get("sigma_scale", 1) + self.population_size = hparams.get("population_size", PRUNING_LEGR_POPULATION_SIZE) + self.num_generations = hparams.get("num_generations", PRUNING_LEGR_GENERATIONS) + self.num_samples = hparams.get("num_samples", PRUNING_LEGR_NUM_SAMPLES) + self.mutate_percent = hparams.get("mutate_percent", PRUNING_LEGR_MUTATE_PERCENT) + self.scale_sigma = hparams.get("sigma_scale", PRUNING_LEGR_SIGMA_SCALE) self.max_reward = -np.inf self.mean_rewards = [] diff --git a/nncf/torch/pruning/filter_pruning/global_ranking/legr.py b/nncf/torch/pruning/filter_pruning/global_ranking/legr.py index 2949ded0469..d307151eec2 100644 --- a/nncf/torch/pruning/filter_pruning/global_ranking/legr.py +++ b/nncf/torch/pruning/filter_pruning/global_ranking/legr.py @@ -15,7 +15,11 @@ from nncf.common.logging import nncf_logger from nncf.config.schemata.defaults import PRUNING_LEGR_GENERATIONS from nncf.config.schemata.defaults import PRUNING_LEGR_MAX_PRUNING +from nncf.config.schemata.defaults import PRUNING_LEGR_MUTATE_PERCENT +from nncf.config.schemata.defaults import PRUNING_LEGR_NUM_SAMPLES +from nncf.config.schemata.defaults import PRUNING_LEGR_POPULATION_SIZE from nncf.config.schemata.defaults import PRUNING_LEGR_RANDOM_SEED +from nncf.config.schemata.defaults import PRUNING_LEGR_SIGMA_SCALE from nncf.config.schemata.defaults import PRUNING_LEGR_TRAIN_STEPS from nncf.torch.pruning.filter_pruning.global_ranking.evolutionary_optimization import EvolutionOptimizer from nncf.torch.pruning.filter_pruning.global_ranking.evolutionary_optimization import LeGREvolutionEnv @@ -38,6 +42,10 @@ def __init__( generations: int = PRUNING_LEGR_GENERATIONS, max_pruning: float = PRUNING_LEGR_MAX_PRUNING, random_seed: int = PRUNING_LEGR_RANDOM_SEED, + population_size: int = PRUNING_LEGR_POPULATION_SIZE, + num_samples: int = PRUNING_LEGR_NUM_SAMPLES, + mutate_percent: float = PRUNING_LEGR_MUTATE_PERCENT, + scale_sigma: float = PRUNING_LEGR_SIGMA_SCALE, ): """ Initializing all 
necessary structures for optimization- LeGREvolutionEnv environment and EvolutionOptimizer @@ -53,10 +61,20 @@ def __init__( self.num_generations = generations self.max_pruning = max_pruning self.train_steps = train_steps + self.population_size = population_size + self.num_samples = num_samples + self.mutate_percent = mutate_percent + self.scale_sigma = scale_sigma self.pruner = LeGRPruner(pruning_ctrl, target_model) init_filter_norms = self.pruner.init_filter_norms - agent_hparams = {"num_generations": self.num_generations} + agent_hparams = { + "num_generations": self.num_generations, + "population_size": self.population_size, + "num_samples": self.num_samples, + "mutate_percent": self.mutate_percent, + "sigma_scale": self.scale_sigma, + } self.agent = EvolutionOptimizer(init_filter_norms, agent_hparams, random_seed) self.env = LeGREvolutionEnv( self.pruner, diff --git a/nncf/torch/quantization/algo.py b/nncf/torch/quantization/algo.py index 278365d8289..1cbeeac4f69 100644 --- a/nncf/torch/quantization/algo.py +++ b/nncf/torch/quantization/algo.py @@ -83,6 +83,7 @@ from nncf.torch.graph.operator_metatypes import PTCatMetatype from nncf.torch.graph.operator_metatypes import PTDepthwiseConv2dSubtype from nncf.torch.graph.operator_metatypes import PTModuleConv2dMetatype +from nncf.torch.graph.transformations.commands import ExtraCompressionModuleType from nncf.torch.graph.transformations.commands import PTInsertionCommand from nncf.torch.graph.transformations.commands import PTTargetPoint from nncf.torch.graph.transformations.commands import TransformationPriority @@ -90,7 +91,6 @@ from nncf.torch.hardware.config import PTHWConfig from nncf.torch.initialization import SimpleDataLoaderRunner from nncf.torch.module_operations import UpdatePaddingValue -from nncf.torch.nncf_network import ExtraCompressionModuleType from nncf.torch.nncf_network import LoadStateListener from nncf.torch.nncf_network import NNCFNetwork from nncf.torch.quantization.adjust_padding import AdjustPaddingArgs @@ -534,7 +534,7 @@ def _parse_range_init_params(self) -> Optional[PTRangeInitParams]: return PTRangeInitParams(**range_init_params) if range_init_params is not None else None def _parse_precision_init_params(self, initializer_config: Dict) -> Tuple[str, BasePrecisionInitParams]: - init_precision_config = initializer_config.get("precision", None) + init_precision_config = initializer_config.get("precision") if not init_precision_config: return None, None precision_init_type = init_precision_config.get("type", "manual") @@ -934,7 +934,7 @@ def _build_insertion_commands_list_for_quantizer_setup( range_init_minmax_values = None if minmax_values_for_range_init: - minmax_stat = minmax_values_for_range_init[qp_id] if qp_id in minmax_values_for_range_init else None + minmax_stat = minmax_values_for_range_init.get(qp_id) if minmax_stat is not None: range_init_minmax_values = (minmax_stat.min_values, minmax_stat.max_values) @@ -1084,7 +1084,7 @@ def ip_str_repr_key_lambda(x): min_values = None max_values = None for qp_id in sorted_qp_ids: - minmax_stat = minmax_values_for_range_init[qp_id] if qp_id in minmax_values_for_range_init else None + minmax_stat = minmax_values_for_range_init.get(qp_id) if minmax_stat is None: continue @@ -1208,15 +1208,11 @@ def is_weights(ip: PTTargetPoint) -> bool: # share the single module and this would be impossible for multiple weight quantizer sharing if # the corresponding UpdateWeights operations contained real modules (these would simply get copied # by PyTorch internals) - callable_obj = 
ExternalQuantizerCallHook( - target_model.nncf.get_tracing_context(), external_quantizer_storage_key, self._debug_interface - ) + callable_obj = ExternalQuantizerCallHook(external_quantizer_storage_key, self._debug_interface) else: # Hooks will be identical for each affected op_address in the linked scenario # - will call one and the same quantizer - callable_obj = ExternalQuantizerCallHook( - target_model.nncf.get_tracing_context(), external_quantizer_storage_key, self._debug_interface - ) + callable_obj = ExternalQuantizerCallHook(external_quantizer_storage_key, self._debug_interface) nncf_logger.debug( f"Performing " diff --git a/nncf/torch/quantization/debug_interface.py b/nncf/torch/quantization/debug_interface.py index 60798db1225..b46759ba466 100644 --- a/nncf/torch/quantization/debug_interface.py +++ b/nncf/torch/quantization/debug_interface.py @@ -59,7 +59,7 @@ def __init__(self): self._strict_forward = False def init_actual(self, owner_model: NNCFNetwork): - from nncf.torch.nncf_network import ExtraCompressionModuleType + from nncf.torch.graph.transformations.commands import ExtraCompressionModuleType quantization_types = [class_type.__name__ for class_type in QUANTIZATION_MODULES.registry_dict.values()] quantizers_in_nncf_modules = owner_model.nncf.get_modules_in_nncf_modules_by_type(quantization_types) diff --git a/nncf/torch/quantization/external_quantizer.py b/nncf/torch/quantization/external_quantizer.py index 128441a7eab..7df7d20f994 100644 --- a/nncf/torch/quantization/external_quantizer.py +++ b/nncf/torch/quantization/external_quantizer.py @@ -9,7 +9,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from nncf.torch.dynamic_graph.context import TracingContext from nncf.torch.external_hook import ExternalOpCallHook from nncf.torch.quantization.debug_interface import QuantizationDebugInterface @@ -25,11 +24,10 @@ class ExternalQuantizerCallHook(ExternalOpCallHook): def __init__( self, - context: TracingContext, quantizer_storage_key: str, debug_interface: QuantizationDebugInterface = None, ): - super().__init__(EXTERNAL_QUANTIZERS_STORAGE_NAME, context, quantizer_storage_key) + super().__init__(EXTERNAL_QUANTIZERS_STORAGE_NAME, quantizer_storage_key) self.debug_interface = debug_interface def __call__(self, *args, **kwargs): diff --git a/nncf/torch/quantization/init_range.py b/nncf/torch/quantization/init_range.py index 6ff97bc35c5..87eb97cdf8e 100644 --- a/nncf/torch/quantization/init_range.py +++ b/nncf/torch/quantization/init_range.py @@ -107,32 +107,10 @@ def __init__( self._input_shape = input_shape self._channel_idx = channel_idx - def get_reduction_axes(self, per_sample_stats: bool) -> ReductionAxes: - """ - Calculates the reduction axes of the tensor. - - :param per_sample_stats: Boolean flag that indicated whether statistics are collected per-sample or per-batch. - :return: Shape to reduce to. 
- """ - ndims = len(self._input_shape) - reduction_axes: List[int] = list(range(ndims)) + def get_reduction_aggregation_axes(self, is_per_sample: bool) -> Tuple[ReductionAxes, AggregationAxes]: if self.is_per_channel: - val = (ndims + self._channel_idx) % ndims - reduction_axes.remove(val) - if not val and self.use_per_sample_stats(per_sample_stats): - raise nncf.InternalError("Batch dimension should be equal to zero") - if self.use_per_sample_stats(per_sample_stats): - reduction_axes = reduction_axes[1:] # Assumes batch is the first dimension - return tuple(reduction_axes) - - def get_aggregation_axes(self, per_sample_stats: bool) -> AggregationAxes: - """ - Calculates the aggregation axes of the tensor. - - :param per_sample_stats: Boolean flag that indicated whether statistics are collected per-sample or per-batch. - :return: Shape to aggregate to. - """ - return (0, 1) if self.use_per_sample_stats(per_sample_stats) else (0,) + return super().get_reduction_aggregation_axes(self._input_shape, (self._channel_idx,), is_per_sample) + return super().get_reduction_aggregation_axes(self._input_shape, (), is_per_sample) class StatCollectorGenerator: @@ -180,9 +158,7 @@ def generate_stat_collector_for_range_init_config( raise nncf.InternalError("Unknown range init type: {}".format(init_config.init_type)) use_per_sample_stats = collector_params.use_per_sample_stats(init_config.init_type == "mixed_min_max") - reduction_axes = collector_params.get_reduction_axes(use_per_sample_stats) - aggregation_axes = collector_params.get_aggregation_axes(use_per_sample_stats) - + reduction_axes, aggregation_axes = collector_params.get_reduction_aggregation_axes(use_per_sample_stats) if init_config.init_type == "min_max": return get_min_max_statistic_collector( use_abs_max=collector_params.use_abs_max, diff --git a/nncf/torch/quantization/layers.py b/nncf/torch/quantization/layers.py index 937d156c8fe..cb8906b1ff0 100644 --- a/nncf/torch/quantization/layers.py +++ b/nncf/torch/quantization/layers.py @@ -1039,14 +1039,18 @@ class WeightsDecompressor(nn.Module): Applies decompression of compressed weights in the forward pass """ - def __init__(self, scale: torch.Tensor, zero_point: torch.Tensor): + def __init__(self, scale: torch.Tensor, zero_point: torch.Tensor, result_dtype: torch.dtype = None): """ :param scale: A scale in quantization scheme :param zero_point: A zero point in quantization scheme + :param result_dtype: (Optional) A data type that result should be cast to """ super().__init__() self.register_buffer("_scale", scale) self.register_buffer("_zero_point", zero_point) + self.result_dtype = result_dtype def forward(self, x): - return decompress(x, self._scale, self._zero_point) + result = decompress(x, self._scale, self._zero_point) + result = result.type(dtype=self.result_dtype) if self.result_dtype is not None else result + return result diff --git a/nncf/torch/quantization/precision_init/base_init.py b/nncf/torch/quantization/precision_init/base_init.py index ef4404c0944..9a1581ebf93 100644 --- a/nncf/torch/quantization/precision_init/base_init.py +++ b/nncf/torch/quantization/precision_init/base_init.py @@ -18,8 +18,8 @@ from nncf.common.quantization.structs import QuantizerId from nncf.common.quantization.structs import WeightQuantizerId from nncf.torch.dynamic_graph.scope import Scope +from nncf.torch.graph.transformations.commands import ExtraCompressionModuleType from nncf.torch.module_operations import UpdateWeight -from nncf.torch.nncf_network import ExtraCompressionModuleType from 
nncf.torch.nncf_network import NNCFNetwork from nncf.torch.quantization.layers import QUANTIZATION_MODULES from nncf.torch.quantization.layers import BaseQuantizer diff --git a/nncf/torch/quantization/precision_init/hawq_debug.py b/nncf/torch/quantization/precision_init/hawq_debug.py index 697d8367bbc..df8ddfefdbf 100644 --- a/nncf/torch/quantization/precision_init/hawq_debug.py +++ b/nncf/torch/quantization/precision_init/hawq_debug.py @@ -20,7 +20,7 @@ from nncf.common.logging import nncf_logger from nncf.common.utils.decorators import skip_if_dependency_unavailable from nncf.common.utils.dot_file_rw import write_dot_graph -from nncf.torch.nncf_network import ExtraCompressionModuleType +from nncf.torch.graph.transformations.commands import ExtraCompressionModuleType from nncf.torch.nncf_network import NNCFNetwork from nncf.torch.quantization.adjust_padding import add_adjust_padding_nodes from nncf.torch.quantization.layers import QUANTIZATION_MODULES diff --git a/nncf/torch/quantization/precision_init/hawq_init.py b/nncf/torch/quantization/precision_init/hawq_init.py index 743e458413a..a60b15ad563 100644 --- a/nncf/torch/quantization/precision_init/hawq_init.py +++ b/nncf/torch/quantization/precision_init/hawq_init.py @@ -95,7 +95,7 @@ def from_config( return cls( user_init_args=user_init_args, bitwidths=hawq_init_config_dict.get("bits", PRECISION_INIT_BITWIDTHS), - traces_per_layer_path=hawq_init_config_dict.get("traces_per_layer_path", None), + traces_per_layer_path=hawq_init_config_dict.get("traces_per_layer_path"), num_data_points=hawq_init_config_dict.get("num_data_points", HAWQ_NUM_DATA_POINTS), iter_number=hawq_init_config_dict.get("iter_number", HAWQ_ITER_NUMBER), tolerance=hawq_init_config_dict.get("tolerance", HAWQ_TOLERANCE), diff --git a/nncf/torch/quantization/quantize_model.py b/nncf/torch/quantization/quantize_model.py index fe883dabb6e..48f3ddefae2 100644 --- a/nncf/torch/quantization/quantize_model.py +++ b/nncf/torch/quantization/quantize_model.py @@ -26,7 +26,9 @@ from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters from nncf.quantization.algorithms.post_training.algorithm import PostTrainingQuantization from nncf.quantization.algorithms.weight_compression.algorithm import WeightCompression +from nncf.quantization.quantize_model import warning_model_no_batchwise_support from nncf.scopes import IgnoredScope +from nncf.torch.graph.operator_metatypes import OPERATIONS_OUTPUT_HAS_NO_BATCH_AXIS from nncf.torch.model_creation import wrap_model DEFAULT_RANGE_TYPE = "mean_min_max" @@ -68,10 +70,9 @@ def quantize_impl( ignored_scope=ignored_scope, advanced_parameters=advanced_parameters, ) - - quantized_model = quantization_algorithm.apply( - nncf_network, nncf_network.nncf.get_graph(), dataset=calibration_dataset - ) + graph = nncf_network.nncf.get_graph() + warning_model_no_batchwise_support(graph, advanced_parameters, model_type, OPERATIONS_OUTPUT_HAS_NO_BATCH_AXIS) + quantized_model = quantization_algorithm.apply(nncf_network, graph, dataset=calibration_dataset) quantized_model.nncf.disable_dynamic_graph_building() diff --git a/nncf/torch/quantization/strip.py b/nncf/torch/quantization/strip.py index 76dfe2113bf..76cbeac741d 100644 --- a/nncf/torch/quantization/strip.py +++ b/nncf/torch/quantization/strip.py @@ -15,7 +15,7 @@ from torch.quantization.fake_quantize import FakeQuantize import nncf -from nncf.torch.nncf_network import ExtraCompressionModuleType +from nncf.torch.graph.transformations.commands import ExtraCompressionModuleType from 
nncf.torch.nncf_network import NNCFNetwork from nncf.torch.quantization.layers import AsymmetricQuantizer from nncf.torch.quantization.layers import BaseQuantizer diff --git a/ruff.toml b/ruff.toml index 53940dc8dcb..cf3a51e0c36 100644 --- a/ruff.toml +++ b/ruff.toml @@ -1,10 +1,19 @@ line-length = 120 +exclude = ["nncf/tensorflow/__init__.py"] + +[lint] +preview = true ignore-init-module-imports = true ignore = [ + "E201", # whitespace-after-open-bracket + "E203", # whitespace-before-punctuation + "E231", # missing-whitespace + "E251", # unexpected-spaces-around-keyword-parameter-equals "E731", # lambda-assignment "SIM108", # if-else-block-instead-of-if-exp "SIM110", # reimplemented-builtin "SIM117", # multiple-with-statements + "SIM103", # needless-bool ] select = [ "E", # pycodestyle rules @@ -14,9 +23,8 @@ select = [ extend-select = [ "SIM", # https://pypi.org/project/flake8-simplify ] -exclude = ["nncf/tensorflow/__init__.py"] -[per-file-ignores] +[lint.per-file-ignores] "nncf/experimental/torch/nas/bootstrapNAS/__init__.py" = ["F401"] "nncf/torch/__init__.py" = ["F401", "E402"] "tests/**/*.py" = ["F403"] @@ -24,7 +32,7 @@ exclude = ["nncf/tensorflow/__init__.py"] "examples/**/*.py" = ["F403"] -[flake8-copyright] +[lint.flake8-copyright] notice-rgx = """\ # Copyright \\(c\\) (\\d{4}|\\d{4}-\\d{4}) Intel Corporation # Licensed under the Apache License, Version 2.0 \\(the "License"\\); diff --git a/setup.py b/setup.py index c6899ec0389..08d25898d2b 100644 --- a/setup.py +++ b/setup.py @@ -131,7 +131,7 @@ def find_version(*file_paths): "torch==2.2.1;python_version < '3.11'", ] -ONNX_EXTRAS = ["onnx~=1.13.1", "onnxruntime~=1.14.1;python_version < '3.11'"] +ONNX_EXTRAS = ["onnx==1.16.0", "onnxruntime==1.17.1;python_version < '3.11'"] OPENVINO_EXTRAS = ["openvino==2024.0"] diff --git a/tests/common/experimental/test_tensor_collector_batch_size.py b/tests/common/experimental/test_tensor_collector_batch_size.py new file mode 100644 index 00000000000..8f29ccb1e53 --- /dev/null +++ b/tests/common/experimental/test_tensor_collector_batch_size.py @@ -0,0 +1,120 @@ +# Copyright (c) 2024 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from abc import ABC +from abc import abstractmethod +from typing import List + +import numpy as np +import pytest + +from nncf.common.graph.utils import get_reduction_axes +from nncf.experimental.common.tensor_statistics.collectors import TensorCollector + + +class TemplateTestTensorCollectorBatchSize(ABC): + @staticmethod + @abstractmethod + def get_tensor_statistics_class(): + pass + + @staticmethod + @abstractmethod + def get_tensor_processor(): + pass + + @staticmethod + @abstractmethod + def get_nncf_tensor_class(): + pass + + @pytest.fixture + @abstractmethod + def reducers(self): + pass + + @pytest.fixture + @abstractmethod + def aggregators(self): + pass + + @pytest.fixture + @abstractmethod + def inplace(self): + pass + + @staticmethod + @abstractmethod + def to_backend_tensor(self, tensor: np.ndarray): + pass + + def create_dataitems_without_batch_dim(self, input_shape: List[int], length: int = 100) -> List[np.ndarray]: + rng = np.random.default_rng(seed=0) + data_items = [] + for _ in range(length): + data_items.append(rng.uniform(0, 1, input_shape)) + return data_items + + def add_batch_dim_to_dataitems(self, data_items: List[np.ndarray], batch_size: int) -> List[np.ndarray]: + assert batch_size >= 1 + dataset = [] + item = [] + cnt = 0 + for data_item in data_items: + if batch_size == 1: + dataset.append(np.expand_dims(data_item, 0)) + else: + item.append(data_item) + if cnt == batch_size - 1: + dataset.append(np.array(item)) + item = [] + cnt = -1 + cnt += 1 + + return dataset + + def _create_tensor_collector(self, shape, inplace, reducer, aggregator) -> TensorCollector: + batch_axis = 0 + statistic_branch_random_name = "1" + collector = TensorCollector(self.get_tensor_statistics_class()) + reduction_axes = get_reduction_axes([batch_axis], shape) + aggregation_axes = (0, 1) + kwargs = {"reduction_axes": reduction_axes, "inplace": inplace} + reducer = reducer(**kwargs) + aggregator = aggregator( + aggregation_axes=aggregation_axes, + tensor_processor=self.get_tensor_processor(), + ) + collector.register_statistic_branch(statistic_branch_random_name, reducer, aggregator) + return collector, reducer, aggregator + + def _register_inputs(self, collector, dataitems, reducer): + for item in dataitems: + item = self.to_backend_tensor(item) + input_ = {hash(reducer): [self.get_nncf_tensor_class()(item)]} + collector.register_inputs(input_) + + def test_statistics_batch_size_equal(self, reducers, aggregators, inplace): + tensor_shape = [3, 20, 20] + dataitems = self.create_dataitems_without_batch_dim(input_shape=tensor_shape) + + shape_batch_1 = [1, *tensor_shape] + collector, reducer, _ = self._create_tensor_collector(shape_batch_1, inplace, reducers, aggregators) + dataitems_batch_1 = self.add_batch_dim_to_dataitems(dataitems, batch_size=1) + self._register_inputs(collector, dataitems_batch_1, reducer) + aggregated_tensor_batch_1 = list(collector._aggregate().values()) + + shape_batch_10 = [10, *tensor_shape] + collector, reducer, _ = self._create_tensor_collector(shape_batch_10, inplace, reducers, aggregators) + dataitems_batch_10 = self.add_batch_dim_to_dataitems(dataitems, batch_size=10) + self._register_inputs(collector, dataitems_batch_10, reducer) + aggregated_tensor_batch_10 = list(collector._aggregate().values()) + + assert np.array_equal(aggregated_tensor_batch_1, aggregated_tensor_batch_10) diff --git a/tests/common/graph/test_utils.py b/tests/common/graph/test_utils.py index a58b121117b..477ba2e9d85 100644 --- a/tests/common/graph/test_utils.py +++ 
b/tests/common/graph/test_utils.py @@ -12,6 +12,7 @@ import pytest from nncf.common.graph.utils import get_concat_axis +from nncf.common.graph.utils import get_reduction_axes TEST_CASES = [ ([(1, 1), (1, 1)], [(2, 1)], [0]), @@ -27,3 +28,18 @@ def test_get_concat_axis(input_shape, output_shape, possible_axes): axis = get_concat_axis(input_shape, output_shape) assert axis in possible_axes + + +@pytest.mark.parametrize( + "shape, channel_axes, ref_reduction_axes", + [ + ((1, 128), [-1], (0,)), + ((1, 256, 1), [-2], (0, 2)), + ((1, 128, 512), [-1], (0, 1)), + ((1, 3, 224, 224), [1], (0, 2, 3)), + ((1, 1, 12, 12), [1], (0, 2, 3)), + ((1, 1, 12, 12), [1, 2], (0, 3)), + ], +) +def test_get_reduction_axes(shape, channel_axes, ref_reduction_axes): + assert get_reduction_axes(channel_axes, shape) == ref_reduction_axes diff --git a/tests/common/quantization/mock_graphs.py b/tests/common/quantization/mock_graphs.py index 7fe44a9836f..b8cb41ee142 100644 --- a/tests/common/quantization/mock_graphs.py +++ b/tests/common/quantization/mock_graphs.py @@ -10,7 +10,7 @@ # limitations under the License. import random -from typing import Dict, List, Optional, Set, Tuple +from typing import Any, Dict, List, Optional, Set, Tuple from unittest.mock import MagicMock import networkx as nx @@ -57,7 +57,9 @@ def __init__( self.layer_attributes = layer_attributes -def create_mock_graph(nodes: List[NodeWithType], node_edges: List[Tuple[str, str]]) -> nx.DiGraph: +def create_mock_graph( + nodes: List[NodeWithType], node_edges: List[Tuple[str, str]], edges_attrs: Optional[Tuple[Any]] = None +) -> nx.DiGraph: mock_graph = nx.DiGraph() for node in nodes: mock_node_attrs = get_mock_nncf_node_attrs( @@ -67,7 +69,11 @@ def create_mock_graph(nodes: List[NodeWithType], node_edges: List[Tuple[str, str layer_attributes=node.layer_attributes, ) mock_graph.add_node(node.node_name, **mock_node_attrs) - mock_graph.add_edges_from(node_edges) + if edges_attrs: + for (edge_from, edge_to), attr in zip(node_edges, edges_attrs): + mock_graph.add_edge(edge_from, edge_to, **attr) + else: + mock_graph.add_edges_from(node_edges) mark_input_ports_lexicographically_based_on_input_node_key(mock_graph) return mock_graph @@ -121,8 +127,9 @@ def get_nncf_graph_from_mock_nx_graph(nx_graph: nx.DiGraph, nncf_graph_cls=NNCFG out_idx, creator_id = edge_vs_output_idx_and_creator_id[in_edge] edge_data = nx_graph.edges[in_edge] dtype = edge_data.get(NNCFGraph.DTYPE_EDGE_ATTR, Dtype.FLOAT) + shape = edge_data.get(NNCFGraph.ACTIVATION_SHAPE_EDGE_ATTR, [1, 1, 1, 1]) mock_graph.add_edge_between_nncf_nodes( - creator_id, node_id, [1, 1, 1, 1], input_port_id=pred_idx, output_port_id=out_idx, dtype=dtype + creator_id, node_id, shape, input_port_id=pred_idx, output_port_id=out_idx, dtype=dtype ) for out_idx, out_edge in enumerate(nx_graph.out_edges(curr_node_key)): diff --git a/tests/common/test_statistics_aggregator.py b/tests/common/test_statistics_aggregator.py index f32ebfb74a0..2a2161abc40 100644 --- a/tests/common/test_statistics_aggregator.py +++ b/tests/common/test_statistics_aggregator.py @@ -22,9 +22,9 @@ from nncf.common.factory import NNCFGraphFactory from nncf.common.graph.transformations.commands import TargetPoint from nncf.common.graph.transformations.commands import TargetType -from nncf.common.quantization.initialization.range import RangeInitCollectorParams from nncf.common.quantization.structs import QuantizationScheme as QuantizationMode from nncf.common.quantization.structs import QuantizerConfig +from nncf.common.tensor_statistics.aggregator 
import EMPTY_DATASET_ERROR from nncf.common.tensor_statistics.statistic_point import StatisticPoint from nncf.common.tensor_statistics.statistic_point import StatisticPointsContainer from nncf.experimental.common.tensor_statistics.collectors import NoopAggregator @@ -32,6 +32,7 @@ from nncf.experimental.common.tensor_statistics.collectors import TensorReducerBase from nncf.quantization.algorithms.bias_correction.backend import BiasCorrectionAlgoBackend from nncf.quantization.algorithms.fast_bias_correction.backend import FastBiasCorrectionAlgoBackend +from nncf.quantization.algorithms.min_max.algorithm import MinMaxQuantization from nncf.quantization.algorithms.min_max.backend import MinMaxAlgoBackend from nncf.quantization.range_estimator import AggregatorType from nncf.quantization.range_estimator import RangeEstimatorParameters @@ -40,6 +41,14 @@ from nncf.quantization.range_estimator import StatisticsType +class MockedDataset: + def __iter__(self): + return self + + def __next__(self): + raise StopIteration + + class BiasCorrectionAlgos(Enum): BIAS_CORRECTION = "bias_correction" FAST_BIAS_CORRECTION = "fast_bias_correction" @@ -119,6 +128,10 @@ def reducers_map(self) -> List[TensorReducerBase]: def dataset_values(self): return [{"max": 1, "min": -10}, {"max": 0.1, "min": -1}, {"max": 128, "min": -128}] + @staticmethod + def get_min_max_algo_cls() -> Type[MinMaxQuantization]: + return MinMaxQuantization + @dataclass class MinMaxTestParameters: range_estimator_params: RangeEstimatorParameters @@ -382,6 +395,7 @@ def test_statistics_aggregator_min_max( dataset_samples, inplace_statistics, is_backend_support_custom_estimators, + mocker, ): model = self.get_backend_model(dataset_samples) quantizer_config = QuantizerConfig( @@ -405,6 +419,7 @@ def test_statistics_aggregator_min_max( algorithm_name, inplace_statistics, test_parameters.range_estimator_params, + mocker, ) statistics_points = StatisticPointsContainer() statistics_points.add_statistic_point(statistic_point) @@ -626,24 +641,20 @@ def filter_func(point): @classmethod def create_statistics_point( - cls, model, q_config, target_point, subset_size, algorithm_name, inplace_statistics, range_estimator + cls, model, q_config, target_point, subset_size, algorithm_name, inplace_statistics, range_estimator, mocker ): - algo_backend = cls.get_min_max_algo_backend_cls() - nncf_graph = NNCFGraphFactory.create(model) - - collector_params = RangeInitCollectorParams( - is_weights=target_point.is_weight_target_point(), - scheme=q_config.mode, - per_channel=q_config.per_channel, + _ = mocker.patch( + "nncf.quantization.algorithms.min_max.algorithm.MinMaxQuantization._get_range_estimator_parameters", + return_value=range_estimator, ) - tensor_collector = algo_backend.get_statistic_collector( - range_estimator, - nncf_graph=nncf_graph, - target_point=target_point, - collector_params=collector_params, - num_samples=subset_size, - inplace=inplace_statistics, + algo = cls.get_min_max_algo_cls()( + subset_size=subset_size, + inplace_statistics=inplace_statistics, ) + algo._set_backend_entity(model) + nncf_graph = NNCFGraphFactory.create(model) + algo._subset_size = subset_size + tensor_collector = algo._get_stat_collector(nncf_graph, target_point, q_config, False) return StatisticPoint(target_point=target_point, tensor_collector=tensor_collector, algorithm=algorithm_name) @pytest.mark.parametrize( @@ -656,7 +667,7 @@ def create_statistics_point( ), ), ) - def test_statistics_merging_simple(self, dataset_samples, inplace_statistics, 
statistic_point_params): + def test_statistics_merging_simple(self, dataset_samples, inplace_statistics, statistic_point_params, mocker): model = self.get_backend_model(dataset_samples) quantizer_config = QuantizerConfig(mode=QuantizationMode.SYMMETRIC, per_channel=False) subset_size = len(dataset_samples) @@ -669,7 +680,14 @@ def test_statistics_merging_simple(self, dataset_samples, inplace_statistics, st ref_val[algorithm_name] = (ref_min_val, ref_max_val) target_point = self.get_target_point(target_point_type) statistics_point = self.create_statistics_point( - model, quantizer_config, target_point, subset_size, algorithm_name, inplace_statistics, range_estimator + model, + quantizer_config, + target_point, + subset_size, + algorithm_name, + inplace_statistics, + range_estimator, + mocker, ) statistics_points.add_statistic_point(statistics_point) @@ -745,45 +763,33 @@ def _check_shared_convs_merged_stats(cls, merged_statistics): } @pytest.mark.parametrize("key", ["split_concat", "shared_conv"]) - def test_statistic_merging(self, test_params, key, dataset_samples, inplace_statistics): + def test_statistic_merging(self, test_params, key, dataset_samples, inplace_statistics, mocker): params = test_params["test_statistic_merging"][key] model = params["model"](dataset_samples) nncf_graph = NNCFGraphFactory.create(model) quantizer_config = QuantizerConfig(mode=QuantizationMode.SYMMETRIC, per_channel=False) statistics_points = StatisticPointsContainer() - collectors_and_refs = [] - algo_backend = self.get_min_max_algo_backend_cls() target_point_cls = self.get_target_point_cls() + sp_and_refs = [] for target_point_args, ref in self.MERGED_TARGET_POINT_AND_REFS[key]: target_point = target_point_cls(*target_point_args) - collector_params = RangeInitCollectorParams( - is_weights=target_point.is_weight_target_point(), - scheme=quantizer_config.mode, - per_channel=quantizer_config.per_channel, - ) - min_max_tensor_collector = algo_backend.get_statistic_collector( - RangeEstimatorParametersSet.MINMAX, - nncf_graph=nncf_graph, - target_point=target_point, - collector_params=collector_params, - num_samples=len(dataset_samples), - inplace=inplace_statistics, - ) - mean_min_max_tensor_collector = algo_backend.get_statistic_collector( - RangeEstimatorParametersSet.MEAN_MINMAX, - nncf_graph=nncf_graph, - target_point=target_point, - collector_params=collector_params, - num_samples=len(dataset_samples), - inplace=inplace_statistics, - ) - - for tensor_collector in [min_max_tensor_collector, mean_min_max_tensor_collector]: - stat_point = StatisticPoint(target_point, tensor_collector, "TEST") - statistics_points.add_statistic_point(stat_point) - collectors_and_refs.append((min_max_tensor_collector, ref["min_max"])) - collectors_and_refs.append((mean_min_max_tensor_collector, ref["mean_min_max"])) + for estimator, ref_val in ( + (RangeEstimatorParametersSet.MINMAX, ref["min_max"]), + (RangeEstimatorParametersSet.MEAN_MINMAX, ref["mean_min_max"]), + ): + s_p = self.create_statistics_point( + model, + quantizer_config, + target_point, + len(dataset_samples), + "TEST", + inplace_statistics, + estimator, + mocker, + ) + statistics_points.add_statistic_point(s_p) + sp_and_refs.append((s_p, ref_val)) dataset = self.get_dataset(dataset_samples) statistics_aggregator = self.get_statistics_aggregator(dataset) @@ -798,7 +804,8 @@ def test_statistic_merging(self, test_params, key, dataset_samples, inplace_stat statistics_aggregator.register_statistic_points(statistics_points) 
statistics_aggregator.collect_statistics(model, nncf_graph)
-        for collector, ref in collectors_and_refs:
+        for sp, ref in sp_and_refs:
+            collector = sp.algorithm_to_tensor_collectors["TEST"][0]
             stat = collector.get_statistics()
             assert np.allclose(stat.min_values, ref[0])
             assert np.allclose(stat.max_values, ref[1])
@@ -871,7 +878,7 @@ def product_dict(**kwargs):
             ),
         ),
     )
-    def test_register_statistics(self, dataset_samples, statistic_point_params):
+    def test_register_statistics(self, dataset_samples, statistic_point_params, mocker):
         model = self.get_backend_model(dataset_samples)
         quantizer_config = QuantizerConfig(mode=QuantizationMode.SYMMETRIC, per_channel=False)
         statistics_points = StatisticPointsContainer()
@@ -882,7 +889,7 @@ def test_register_statistics(self, dataset_samples, statistic_point_params):
             ref_val[algorithm_name] = subset_size
             target_point = self.get_target_point(target_point_type)
             statistics_point = self.create_statistics_point(
-                model, quantizer_config, target_point, subset_size, algorithm_name, True, range_estimator
+                model, quantizer_config, target_point, subset_size, algorithm_name, True, range_estimator, mocker
             )
             statistics_points.add_statistic_point(statistics_point)

@@ -898,30 +905,22 @@ def test_register_statistics(self, dataset_samples, statistic_point_params):
                 ref_subset_size = subset_size
         assert statistics_aggregator.stat_subset_size == ref_subset_size

-    def test_collect_with_empty_dataset(self, dataset_samples):
+    def test_collect_with_empty_dataset_no_len(self, dataset_samples):
+        """
+        Checks that an error is raised correctly when the dataset has no elements to iterate over.
+        """
         model = self.get_backend_model(dataset_samples)
-        dataset_samples = []
-        dataset = self.get_dataset(dataset_samples)
-        graph = NNCFGraphFactory.create(model)
-
-        inplace_statistics = False
-        quantizer_config = QuantizerConfig(mode=QuantizationMode.ASYMMETRIC, per_channel=False)
-        target_point = self.get_target_point(TargetType.POST_LAYER_OPERATION)
-        algorithm_name = "TestAlgo"
-        statistic_point = self.create_statistics_point(
-            model,
-            quantizer_config,
-            target_point,
-            len(dataset_samples),
-            algorithm_name,
-            inplace_statistics,
-            RangeEstimatorParametersSet.MEAN_MINMAX,
+        dummy_statistic_point = StatisticPoint(
+            target_point=self.get_target_point(TargetType.POST_LAYER_OPERATION),
+            tensor_collector=TensorCollector(),
+            algorithm="dummy",
         )
         statistics_points = StatisticPointsContainer()
-        statistics_points.add_statistic_point(statistic_point)
-
+        statistics_points.add_statistic_point(dummy_statistic_point)
+        dataset = nncf.Dataset(MockedDataset())
+        graph = NNCFGraphFactory.create(model)
         statistics_aggregator = self.get_statistics_aggregator(dataset)
         statistics_aggregator.register_statistic_points(statistics_points)
         with pytest.raises(nncf.ValidationError) as e:
             statistics_aggregator.collect_statistics(model, graph)
-        assert "Calibration dataset must not be empty" in e.info
+        assert EMPTY_DATASET_ERROR in str(e)
diff --git a/tests/cross_fw/examples/example_scope.json b/tests/cross_fw/examples/example_scope.json
index 30a227be164..edebf9a839e 100644
--- a/tests/cross_fw/examples/example_scope.json
+++ b/tests/cross_fw/examples/example_scope.json
@@ -172,12 +172,15 @@
         "backend": "torch",
         "requirements": "examples/quantization_aware_training/torch/resnet18/requirements.txt",
         "cpu": "Intel(R) Core(TM) i9-10980XE CPU @ 3.00GHz",
+        "accuracy_tolerance_after_training": 1.0,
         "accuracy_tolerance": 0.2,
         "accuracy_metrics": {
             "fp32_top1": 55.52000045776367,
-            "int8_init_top1": 55.279998779296875,
-            "int8_top1": 56.7721,
-            "accuracy_drop": -1.3499984741210938
+            "int8_init_top1": 55.279998779296875
+        },
+        "accuracy_metrics_after_training":{
+            "int8_top1": 56.74446202531646,
+            "accuracy_drop": -1.2244606018066406
         },
         "performance_metrics": {
             "fp32_fps": 3646.13,
@@ -189,7 +192,6 @@
             "int8_model_size": 11.01035213470459,
             "model_compression_rate": 3.9117704505711512
         }
-
     },
     "llm_compression": {
         "backend": "openvino",
@@ -209,4 +211,4 @@
             "group_size": 64
         }
     }
-}
+}
\ No newline at end of file
diff --git a/tests/cross_fw/examples/run_example.py b/tests/cross_fw/examples/run_example.py
index 2edbd061cb3..dc44a00e650 100644
--- a/tests/cross_fw/examples/run_example.py
+++ b/tests/cross_fw/examples/run_example.py
@@ -10,6 +10,7 @@
 # limitations under the License.

 import json
+import os
 import sys
 from argparse import ArgumentParser
 from typing import Dict, Tuple
@@ -172,6 +173,8 @@ def llm_tune_params() -> Dict[str, float]:
 def quantization_aware_training_torch_resnet18():
     from examples.quantization_aware_training.torch.resnet18.main import main as resnet18_main

+    # Set manual seed and deterministic cuda mode to make the test deterministic
+    set_torch_cuda_seed()
     results = resnet18_main()

     return {
@@ -188,6 +191,27 @@ def quantization_aware_training_torch_resnet18():
     }


+def set_torch_cuda_seed(seed: int = 42):
+    """
+    Sets torch, CUDA and the Python random module to deterministic mode with
+    the given seed.
+    :param seed: Seed to use for the deterministic run.
+    """
+    import random
+
+    import numpy as np
+    import torch
+    from torch.backends import cudnn
+
+    np.random.seed(seed)
+    random.seed(seed)
+    torch.manual_seed(seed)
+    cudnn.deterministic = True
+    cudnn.benchmark = False
+    torch.use_deterministic_algorithms(True)
+    os.environ["CUBLAS_WORKSPACE_CONFIG"] = ":4096:8"
+
+
 def main(argv):
     parser = ArgumentParser()
     parser.add_argument("--name", help="Example name", required=True)
diff --git a/tests/cross_fw/examples/test_examples.py b/tests/cross_fw/examples/test_examples.py
index 8127315d94d..9dc38ea2d24 100644
--- a/tests/cross_fw/examples/test_examples.py
+++ b/tests/cross_fw/examples/test_examples.py
@@ -33,6 +33,7 @@
 MODEL_SIZE_RELATIVE_TOLERANCE = 0.05

 ACCURACY_METRICS = "accuracy_metrics"
+ACCURACY_METRICS_AFTER_TRAINING = "accuracy_metrics_after_training"
 MODEL_SIZE_METRICS = "model_size_metrics"
 PERFORMANCE_METRICS = "performance_metrics"

@@ -83,6 +84,12 @@ def test_examples(
                 value, abs=example_params.get("accuracy_tolerance", ACCURACY_TOLERANCE)
             )

+    if ACCURACY_METRICS_AFTER_TRAINING in example_params:
+        for name, value in example_params[ACCURACY_METRICS_AFTER_TRAINING].items():
+            assert measured_metrics[name] == pytest.approx(
+                value, abs=example_params.get("accuracy_tolerance_after_training", ACCURACY_TOLERANCE)
+            )
+
     if MODEL_SIZE_METRICS in example_params:
         for name, value in example_params[MODEL_SIZE_METRICS].items():
             assert measured_metrics[name] == pytest.approx(value, rel=MODEL_SIZE_RELATIVE_TOLERANCE)
diff --git a/tests/onnx/benchmarking/ac_wrapper.py b/tests/onnx/benchmarking/ac_wrapper.py
index 160fa0a287d..9ae3d639859 100644
--- a/tests/onnx/benchmarking/ac_wrapper.py
+++ b/tests/onnx/benchmarking/ac_wrapper.py
@@ -35,7 +35,7 @@ def _read_image_annotation(image, annotations, label_id_to_label):

     @staticmethod
     def convert_to_voc(image_labels):
-        return [COCO_TO_VOC[label] if label in COCO_TO_VOC else 0 for label in image_labels]
+        return [COCO_TO_VOC.get(label, 0) for label in image_labels]


 if __name__ == "__main__":
diff --git 
a/tests/onnx/data/reference_graphs/original_nncf_graph/MaskRCNN-12.dot b/tests/onnx/data/reference_graphs/original_nncf_graph/MaskRCNN-12.dot index d880f5ab256..37de9eae204 100644 --- a/tests/onnx/data/reference_graphs/original_nncf_graph/MaskRCNN-12.dot +++ b/tests/onnx/data/reference_graphs/original_nncf_graph/MaskRCNN-12.dot @@ -3892,272 +3892,272 @@ strict digraph { "3890 nncf_model_output_3" [id=3890, type=nncf_model_output]; "0 2396" -> "237 2397" [label="[1]", style=dashed]; "1 2395" -> "237 2397" [label="[1]", style=dashed]; -"2 0" -> "3 2" [label="[1, 3, 1, 1]", style=solid]; -"3 2" -> "4 5" [label="[1]", style=solid]; -"4 5" -> "5 6" [label="[1]", style=solid]; -"5 6" -> "6 7" [label="[1]", style=solid]; -"6 7" -> "7 8" [label="[1]", style=solid]; -"7 8" -> "8 30" [label="[1]", style=solid]; -"7 8" -> "11 10" [label="[1]", style=solid]; -"8 30" -> "9 33" [label="[1]", style=solid]; -"9 33" -> "10 34" [label="[1]", style=solid]; -"10 34" -> "22 35" [label="[1]", style=solid]; -"11 10" -> "12 13" [label="[1]", style=solid]; -"12 13" -> "13 14" [label="[1]", style=solid]; -"13 14" -> "14 15" [label="[1]", style=solid]; -"14 15" -> "15 17" [label="[1]", style=solid]; -"15 17" -> "16 20" [label="[1]", style=solid]; -"16 20" -> "17 21" [label="[1]", style=solid]; -"17 21" -> "18 22" [label="[1]", style=solid]; -"18 22" -> "19 24" [label="[1]", style=solid]; -"19 24" -> "20 27" [label="[1]", style=solid]; -"20 27" -> "21 28" [label="[1]", style=solid]; -"21 28" -> "22 35" [label="[1]", style=solid]; -"22 35" -> "23 36" [label="[1]", style=solid]; -"23 36" -> "24 38" [label="[1]", style=solid]; -"23 36" -> "35 57" [label="[1]", style=solid]; -"24 38" -> "25 41" [label="[1]", style=solid]; -"25 41" -> "26 42" [label="[1]", style=solid]; -"26 42" -> "27 43" [label="[1]", style=solid]; -"27 43" -> "28 45" [label="[1]", style=solid]; -"28 45" -> "29 48" [label="[1]", style=solid]; -"29 48" -> "30 49" [label="[1]", style=solid]; -"30 49" -> "31 50" [label="[1]", style=solid]; -"31 50" -> "32 52" [label="[1]", style=solid]; -"32 52" -> "33 55" [label="[1]", style=solid]; -"33 55" -> "34 56" [label="[1]", style=solid]; -"34 56" -> "35 57" [label="[1]", style=solid]; -"35 57" -> "36 58" [label="[1]", style=solid]; -"36 58" -> "37 60" [label="[1]", style=solid]; -"36 58" -> "48 79" [label="[1]", style=solid]; -"37 60" -> "38 63" [label="[1]", style=solid]; -"38 63" -> "39 64" [label="[1]", style=solid]; -"39 64" -> "40 65" [label="[1]", style=solid]; -"40 65" -> "41 67" [label="[1]", style=solid]; -"41 67" -> "42 70" [label="[1]", style=solid]; -"42 70" -> "43 71" [label="[1]", style=solid]; -"43 71" -> "44 72" [label="[1]", style=solid]; -"44 72" -> "45 74" [label="[1]", style=solid]; -"45 74" -> "46 77" [label="[1]", style=solid]; -"46 77" -> "47 78" [label="[1]", style=solid]; -"47 78" -> "48 79" [label="[1]", style=solid]; -"48 79" -> "49 80" [label="[1]", style=solid]; -"49 80" -> "50 102" [label="[1]", style=solid]; -"49 80" -> "53 82" [label="[1]", style=solid]; -"49 80" -> "1177 482" [label="[1]", style=solid]; -"50 102" -> "51 105" [label="[1]", style=solid]; -"51 105" -> "52 106" [label="[1]", style=solid]; -"52 106" -> "64 107" [label="[1]", style=solid]; -"53 82" -> "54 85" [label="[1]", style=solid]; -"54 85" -> "55 86" [label="[1]", style=solid]; -"55 86" -> "56 87" [label="[1]", style=solid]; -"56 87" -> "57 89" [label="[1]", style=solid]; -"57 89" -> "58 92" [label="[1]", style=solid]; -"58 92" -> "59 93" [label="[1]", style=solid]; -"59 93" -> "60 94" [label="[1]", style=solid]; 
-"60 94" -> "61 96" [label="[1]", style=solid]; -"61 96" -> "62 99" [label="[1]", style=solid]; -"62 99" -> "63 100" [label="[1]", style=solid]; -"63 100" -> "64 107" [label="[1]", style=solid]; -"64 107" -> "65 108" [label="[1]", style=solid]; -"65 108" -> "66 110" [label="[1]", style=solid]; -"65 108" -> "77 129" [label="[1]", style=solid]; -"66 110" -> "67 113" [label="[1]", style=solid]; -"67 113" -> "68 114" [label="[1]", style=solid]; -"68 114" -> "69 115" [label="[1]", style=solid]; -"69 115" -> "70 117" [label="[1]", style=solid]; -"70 117" -> "71 120" [label="[1]", style=solid]; -"71 120" -> "72 121" [label="[1]", style=solid]; -"72 121" -> "73 122" [label="[1]", style=solid]; -"73 122" -> "74 124" [label="[1]", style=solid]; -"74 124" -> "75 127" [label="[1]", style=solid]; -"75 127" -> "76 128" [label="[1]", style=solid]; -"76 128" -> "77 129" [label="[1]", style=solid]; -"77 129" -> "78 130" [label="[1]", style=solid]; -"78 130" -> "79 132" [label="[1]", style=solid]; -"78 130" -> "90 151" [label="[1]", style=solid]; -"79 132" -> "80 135" [label="[1]", style=solid]; -"80 135" -> "81 136" [label="[1]", style=solid]; -"81 136" -> "82 137" [label="[1]", style=solid]; -"82 137" -> "83 139" [label="[1]", style=solid]; -"83 139" -> "84 142" [label="[1]", style=solid]; -"84 142" -> "85 143" [label="[1]", style=solid]; -"85 143" -> "86 144" [label="[1]", style=solid]; -"86 144" -> "87 146" [label="[1]", style=solid]; -"87 146" -> "88 149" [label="[1]", style=solid]; -"88 149" -> "89 150" [label="[1]", style=solid]; -"89 150" -> "90 151" [label="[1]", style=solid]; -"90 151" -> "91 152" [label="[1]", style=solid]; -"91 152" -> "92 154" [label="[1]", style=solid]; -"91 152" -> "103 173" [label="[1]", style=solid]; -"92 154" -> "93 157" [label="[1]", style=solid]; -"93 157" -> "94 158" [label="[1]", style=solid]; -"94 158" -> "95 159" [label="[1]", style=solid]; -"95 159" -> "96 161" [label="[1]", style=solid]; -"96 161" -> "97 164" [label="[1]", style=solid]; -"97 164" -> "98 165" [label="[1]", style=solid]; -"98 165" -> "99 166" [label="[1]", style=solid]; -"99 166" -> "100 168" [label="[1]", style=solid]; -"100 168" -> "101 171" [label="[1]", style=solid]; -"101 171" -> "102 172" [label="[1]", style=solid]; -"102 172" -> "103 173" [label="[1]", style=solid]; -"103 173" -> "104 174" [label="[1]", style=solid]; -"104 174" -> "105 196" [label="[1]", style=solid]; -"104 174" -> "108 176" [label="[1]", style=solid]; -"104 174" -> "931 450" [label="[1]", style=solid]; -"105 196" -> "106 199" [label="[1]", style=solid]; -"106 199" -> "107 200" [label="[1]", style=solid]; -"107 200" -> "119 201" [label="[1]", style=solid]; -"108 176" -> "109 179" [label="[1]", style=solid]; -"109 179" -> "110 180" [label="[1]", style=solid]; -"110 180" -> "111 181" [label="[1]", style=solid]; -"111 181" -> "112 183" [label="[1]", style=solid]; -"112 183" -> "113 186" [label="[1]", style=solid]; -"113 186" -> "114 187" [label="[1]", style=solid]; -"114 187" -> "115 188" [label="[1]", style=solid]; -"115 188" -> "116 190" [label="[1]", style=solid]; -"116 190" -> "117 193" [label="[1]", style=solid]; -"117 193" -> "118 194" [label="[1]", style=solid]; -"118 194" -> "119 201" [label="[1]", style=solid]; -"119 201" -> "120 202" [label="[1]", style=solid]; -"120 202" -> "121 204" [label="[1]", style=solid]; -"120 202" -> "132 223" [label="[1]", style=solid]; -"121 204" -> "122 207" [label="[1]", style=solid]; -"122 207" -> "123 208" [label="[1]", style=solid]; -"123 208" -> "124 209" [label="[1]", style=solid]; 
-"124 209" -> "125 211" [label="[1]", style=solid]; -"125 211" -> "126 214" [label="[1]", style=solid]; -"126 214" -> "127 215" [label="[1]", style=solid]; -"127 215" -> "128 216" [label="[1]", style=solid]; -"128 216" -> "129 218" [label="[1]", style=solid]; -"129 218" -> "130 221" [label="[1]", style=solid]; -"130 221" -> "131 222" [label="[1]", style=solid]; -"131 222" -> "132 223" [label="[1]", style=solid]; -"132 223" -> "133 224" [label="[1]", style=solid]; -"133 224" -> "134 226" [label="[1]", style=solid]; -"133 224" -> "145 245" [label="[1]", style=solid]; -"134 226" -> "135 229" [label="[1]", style=solid]; -"135 229" -> "136 230" [label="[1]", style=solid]; -"136 230" -> "137 231" [label="[1]", style=solid]; -"137 231" -> "138 233" [label="[1]", style=solid]; -"138 233" -> "139 236" [label="[1]", style=solid]; -"139 236" -> "140 237" [label="[1]", style=solid]; -"140 237" -> "141 238" [label="[1]", style=solid]; -"141 238" -> "142 240" [label="[1]", style=solid]; -"142 240" -> "143 243" [label="[1]", style=solid]; -"143 243" -> "144 244" [label="[1]", style=solid]; -"144 244" -> "145 245" [label="[1]", style=solid]; -"145 245" -> "146 246" [label="[1]", style=solid]; -"146 246" -> "147 248" [label="[1]", style=solid]; -"146 246" -> "158 267" [label="[1]", style=solid]; -"147 248" -> "148 251" [label="[1]", style=solid]; -"148 251" -> "149 252" [label="[1]", style=solid]; -"149 252" -> "150 253" [label="[1]", style=solid]; -"150 253" -> "151 255" [label="[1]", style=solid]; -"151 255" -> "152 258" [label="[1]", style=solid]; -"152 258" -> "153 259" [label="[1]", style=solid]; -"153 259" -> "154 260" [label="[1]", style=solid]; -"154 260" -> "155 262" [label="[1]", style=solid]; -"155 262" -> "156 265" [label="[1]", style=solid]; -"156 265" -> "157 266" [label="[1]", style=solid]; -"157 266" -> "158 267" [label="[1]", style=solid]; -"158 267" -> "159 268" [label="[1]", style=solid]; -"159 268" -> "160 270" [label="[1]", style=solid]; -"159 268" -> "171 289" [label="[1]", style=solid]; -"160 270" -> "161 273" [label="[1]", style=solid]; -"161 273" -> "162 274" [label="[1]", style=solid]; -"162 274" -> "163 275" [label="[1]", style=solid]; -"163 275" -> "164 277" [label="[1]", style=solid]; -"164 277" -> "165 280" [label="[1]", style=solid]; -"165 280" -> "166 281" [label="[1]", style=solid]; -"166 281" -> "167 282" [label="[1]", style=solid]; -"167 282" -> "168 284" [label="[1]", style=solid]; -"168 284" -> "169 287" [label="[1]", style=solid]; -"169 287" -> "170 288" [label="[1]", style=solid]; -"170 288" -> "171 289" [label="[1]", style=solid]; -"171 289" -> "172 290" [label="[1]", style=solid]; -"172 290" -> "173 292" [label="[1]", style=solid]; -"172 290" -> "184 311" [label="[1]", style=solid]; -"173 292" -> "174 295" [label="[1]", style=solid]; -"174 295" -> "175 296" [label="[1]", style=solid]; -"175 296" -> "176 297" [label="[1]", style=solid]; -"176 297" -> "177 299" [label="[1]", style=solid]; -"177 299" -> "178 302" [label="[1]", style=solid]; -"178 302" -> "179 303" [label="[1]", style=solid]; -"179 303" -> "180 304" [label="[1]", style=solid]; -"180 304" -> "181 306" [label="[1]", style=solid]; -"181 306" -> "182 309" [label="[1]", style=solid]; -"182 309" -> "183 310" [label="[1]", style=solid]; -"183 310" -> "184 311" [label="[1]", style=solid]; -"184 311" -> "185 312" [label="[1]", style=solid]; -"185 312" -> "186 334" [label="[1]", style=solid]; -"185 312" -> "189 314" [label="[1]", style=solid]; -"185 312" -> "685 418" [label="[1]", style=solid]; -"186 334" -> "187 
337" [label="[1]", style=solid]; -"187 337" -> "188 338" [label="[1]", style=solid]; -"188 338" -> "200 339" [label="[1]", style=solid]; -"189 314" -> "190 317" [label="[1]", style=solid]; -"190 317" -> "191 318" [label="[1]", style=solid]; -"191 318" -> "192 319" [label="[1]", style=solid]; -"192 319" -> "193 321" [label="[1]", style=solid]; -"193 321" -> "194 324" [label="[1]", style=solid]; -"194 324" -> "195 325" [label="[1]", style=solid]; -"195 325" -> "196 326" [label="[1]", style=solid]; -"196 326" -> "197 328" [label="[1]", style=solid]; -"197 328" -> "198 331" [label="[1]", style=solid]; -"198 331" -> "199 332" [label="[1]", style=solid]; -"199 332" -> "200 339" [label="[1]", style=solid]; -"200 339" -> "201 340" [label="[1]", style=solid]; -"201 340" -> "202 342" [label="[1]", style=solid]; -"201 340" -> "213 361" [label="[1]", style=solid]; -"202 342" -> "203 345" [label="[1]", style=solid]; -"203 345" -> "204 346" [label="[1]", style=solid]; -"204 346" -> "205 347" [label="[1]", style=solid]; -"205 347" -> "206 349" [label="[1]", style=solid]; -"206 349" -> "207 352" [label="[1]", style=solid]; -"207 352" -> "208 353" [label="[1]", style=solid]; -"208 353" -> "209 354" [label="[1]", style=solid]; -"209 354" -> "210 356" [label="[1]", style=solid]; -"210 356" -> "211 359" [label="[1]", style=solid]; -"211 359" -> "212 360" [label="[1]", style=solid]; -"212 360" -> "213 361" [label="[1]", style=solid]; -"213 361" -> "214 362" [label="[1]", style=solid]; -"214 362" -> "215 364" [label="[1]", style=solid]; -"214 362" -> "226 383" [label="[1]", style=solid]; -"215 364" -> "216 367" [label="[1]", style=solid]; -"216 367" -> "217 368" [label="[1]", style=solid]; -"217 368" -> "218 369" [label="[1]", style=solid]; -"218 369" -> "219 371" [label="[1]", style=solid]; -"219 371" -> "220 374" [label="[1]", style=solid]; -"220 374" -> "221 375" [label="[1]", style=solid]; -"221 375" -> "222 376" [label="[1]", style=solid]; -"222 376" -> "223 378" [label="[1]", style=solid]; -"223 378" -> "224 381" [label="[1]", style=solid]; -"224 381" -> "225 382" [label="[1]", style=solid]; -"225 382" -> "226 383" [label="[1]", style=solid]; -"226 383" -> "227 384" [label="[1]", style=solid]; -"227 384" -> "228 387" [label="[1]", style=solid]; -"228 387" -> "229 390" [label="[1]", style=solid]; -"228 387" -> "686 407" [label="[1]", style=solid]; -"228 387" -> "689 398" [label="[1]", style=solid]; -"228 387" -> "693 392" [label="[1]", style=solid]; -"228 387" -> "701 415" [label="[1]", style=solid]; -"229 390" -> "230 487" [label="[1]", style=solid]; -"229 390" -> "458 506" [label="[1]", style=solid]; -"229 390" -> "518 536" [label="[1]", style=solid]; -"229 390" -> "521 533" [label="[1]", style=solid]; -"229 390" -> "1597 2620" [label="[1]", style=solid]; -"229 390" -> "3502 6664" [label="[1]", style=solid]; -"230 487" -> "231 510" [label="[1]", style=solid]; -"230 487" -> "291 542" [label="[1]", style=solid]; -"230 487" -> "294 539" [label="[1]", style=solid]; -"231 510" -> "232 511" [label="[1]", style=solid]; -"232 511" -> "233 512" [label="[1]", style=solid]; -"232 511" -> "280 513" [label="[1]", style=solid]; -"233 512" -> "234 2154" [label="[1]", style=solid]; -"233 512" -> "245 2160" [label="[1]", style=solid]; -"233 512" -> "248 2157" [label="[1]", style=solid]; -"233 512" -> "255 2170" [label="[1]", style=solid]; +"2 0" -> "3 2" [label="[1, 3, -1, -1]", style=solid]; +"3 2" -> "4 5" [label="[1, 64, -1, -1]", style=solid]; +"4 5" -> "5 6" [label="[1, 64, -1, -1]", style=solid]; +"5 6" -> "6 7" 
[label="[1, 64, -1, -1]", style=solid]; +"6 7" -> "7 8" [label="[1, 64, -1, -1]", style=solid]; +"7 8" -> "8 30" [label="[1, 64, -1, -1]", style=solid]; +"7 8" -> "11 10" [label="[1, 64, -1, -1]", style=solid]; +"8 30" -> "9 33" [label="[1, 256, -1, -1]", style=solid]; +"9 33" -> "10 34" [label="[1, 256, -1, -1]", style=solid]; +"10 34" -> "22 35" [label="[1, 256, -1, -1]", style=solid]; +"11 10" -> "12 13" [label="[1, 64, -1, -1]", style=solid]; +"12 13" -> "13 14" [label="[1, 64, -1, -1]", style=solid]; +"13 14" -> "14 15" [label="[1, 64, -1, -1]", style=solid]; +"14 15" -> "15 17" [label="[1, 64, -1, -1]", style=solid]; +"15 17" -> "16 20" [label="[1, 64, -1, -1]", style=solid]; +"16 20" -> "17 21" [label="[1, 64, -1, -1]", style=solid]; +"17 21" -> "18 22" [label="[1, 64, -1, -1]", style=solid]; +"18 22" -> "19 24" [label="[1, 64, -1, -1]", style=solid]; +"19 24" -> "20 27" [label="[1, 256, -1, -1]", style=solid]; +"20 27" -> "21 28" [label="[1, 256, -1, -1]", style=solid]; +"21 28" -> "22 35" [label="[1, 256, -1, -1]", style=solid]; +"22 35" -> "23 36" [label="[1, 256, -1, -1]", style=solid]; +"23 36" -> "24 38" [label="[1, 256, -1, -1]", style=solid]; +"23 36" -> "35 57" [label="[1, 256, -1, -1]", style=solid]; +"24 38" -> "25 41" [label="[1, 64, -1, -1]", style=solid]; +"25 41" -> "26 42" [label="[1, 64, -1, -1]", style=solid]; +"26 42" -> "27 43" [label="[1, 64, -1, -1]", style=solid]; +"27 43" -> "28 45" [label="[1, 64, -1, -1]", style=solid]; +"28 45" -> "29 48" [label="[1, 64, -1, -1]", style=solid]; +"29 48" -> "30 49" [label="[1, 64, -1, -1]", style=solid]; +"30 49" -> "31 50" [label="[1, 64, -1, -1]", style=solid]; +"31 50" -> "32 52" [label="[1, 64, -1, -1]", style=solid]; +"32 52" -> "33 55" [label="[1, 256, -1, -1]", style=solid]; +"33 55" -> "34 56" [label="[1, 256, -1, -1]", style=solid]; +"34 56" -> "35 57" [label="[1, 256, -1, -1]", style=solid]; +"35 57" -> "36 58" [label="[1, 256, -1, -1]", style=solid]; +"36 58" -> "37 60" [label="[1, 256, -1, -1]", style=solid]; +"36 58" -> "48 79" [label="[1, 256, -1, -1]", style=solid]; +"37 60" -> "38 63" [label="[1, 64, -1, -1]", style=solid]; +"38 63" -> "39 64" [label="[1, 64, -1, -1]", style=solid]; +"39 64" -> "40 65" [label="[1, 64, -1, -1]", style=solid]; +"40 65" -> "41 67" [label="[1, 64, -1, -1]", style=solid]; +"41 67" -> "42 70" [label="[1, 64, -1, -1]", style=solid]; +"42 70" -> "43 71" [label="[1, 64, -1, -1]", style=solid]; +"43 71" -> "44 72" [label="[1, 64, -1, -1]", style=solid]; +"44 72" -> "45 74" [label="[1, 64, -1, -1]", style=solid]; +"45 74" -> "46 77" [label="[1, 256, -1, -1]", style=solid]; +"46 77" -> "47 78" [label="[1, 256, -1, -1]", style=solid]; +"47 78" -> "48 79" [label="[1, 256, -1, -1]", style=solid]; +"48 79" -> "49 80" [label="[1, 256, -1, -1]", style=solid]; +"49 80" -> "50 102" [label="[1, 256, -1, -1]", style=solid]; +"49 80" -> "53 82" [label="[1, 256, -1, -1]", style=solid]; +"49 80" -> "1177 482" [label="[1, 256, -1, -1]", style=solid]; +"50 102" -> "51 105" [label="[1, 512, -1, -1]", style=solid]; +"51 105" -> "52 106" [label="[1, 512, -1, -1]", style=solid]; +"52 106" -> "64 107" [label="[1, 512, -1, -1]", style=solid]; +"53 82" -> "54 85" [label="[1, 128, -1, -1]", style=solid]; +"54 85" -> "55 86" [label="[1, 128, -1, -1]", style=solid]; +"55 86" -> "56 87" [label="[1, 128, -1, -1]", style=solid]; +"56 87" -> "57 89" [label="[1, 128, -1, -1]", style=solid]; +"57 89" -> "58 92" [label="[1, 128, -1, -1]", style=solid]; +"58 92" -> "59 93" [label="[1, 128, -1, -1]", style=solid]; +"59 
93" -> "60 94" [label="[1, 128, -1, -1]", style=solid]; +"60 94" -> "61 96" [label="[1, 128, -1, -1]", style=solid]; +"61 96" -> "62 99" [label="[1, 512, -1, -1]", style=solid]; +"62 99" -> "63 100" [label="[1, 512, -1, -1]", style=solid]; +"63 100" -> "64 107" [label="[1, 512, -1, -1]", style=solid]; +"64 107" -> "65 108" [label="[1, 512, -1, -1]", style=solid]; +"65 108" -> "66 110" [label="[1, 512, -1, -1]", style=solid]; +"65 108" -> "77 129" [label="[1, 512, -1, -1]", style=solid]; +"66 110" -> "67 113" [label="[1, 128, -1, -1]", style=solid]; +"67 113" -> "68 114" [label="[1, 128, -1, -1]", style=solid]; +"68 114" -> "69 115" [label="[1, 128, -1, -1]", style=solid]; +"69 115" -> "70 117" [label="[1, 128, -1, -1]", style=solid]; +"70 117" -> "71 120" [label="[1, 128, -1, -1]", style=solid]; +"71 120" -> "72 121" [label="[1, 128, -1, -1]", style=solid]; +"72 121" -> "73 122" [label="[1, 128, -1, -1]", style=solid]; +"73 122" -> "74 124" [label="[1, 128, -1, -1]", style=solid]; +"74 124" -> "75 127" [label="[1, 512, -1, -1]", style=solid]; +"75 127" -> "76 128" [label="[1, 512, -1, -1]", style=solid]; +"76 128" -> "77 129" [label="[1, 512, -1, -1]", style=solid]; +"77 129" -> "78 130" [label="[1, 512, -1, -1]", style=solid]; +"78 130" -> "79 132" [label="[1, 512, -1, -1]", style=solid]; +"78 130" -> "90 151" [label="[1, 512, -1, -1]", style=solid]; +"79 132" -> "80 135" [label="[1, 128, -1, -1]", style=solid]; +"80 135" -> "81 136" [label="[1, 128, -1, -1]", style=solid]; +"81 136" -> "82 137" [label="[1, 128, -1, -1]", style=solid]; +"82 137" -> "83 139" [label="[1, 128, -1, -1]", style=solid]; +"83 139" -> "84 142" [label="[1, 128, -1, -1]", style=solid]; +"84 142" -> "85 143" [label="[1, 128, -1, -1]", style=solid]; +"85 143" -> "86 144" [label="[1, 128, -1, -1]", style=solid]; +"86 144" -> "87 146" [label="[1, 128, -1, -1]", style=solid]; +"87 146" -> "88 149" [label="[1, 512, -1, -1]", style=solid]; +"88 149" -> "89 150" [label="[1, 512, -1, -1]", style=solid]; +"89 150" -> "90 151" [label="[1, 512, -1, -1]", style=solid]; +"90 151" -> "91 152" [label="[1, 512, -1, -1]", style=solid]; +"91 152" -> "92 154" [label="[1, 512, -1, -1]", style=solid]; +"91 152" -> "103 173" [label="[1, 512, -1, -1]", style=solid]; +"92 154" -> "93 157" [label="[1, 128, -1, -1]", style=solid]; +"93 157" -> "94 158" [label="[1, 128, -1, -1]", style=solid]; +"94 158" -> "95 159" [label="[1, 128, -1, -1]", style=solid]; +"95 159" -> "96 161" [label="[1, 128, -1, -1]", style=solid]; +"96 161" -> "97 164" [label="[1, 128, -1, -1]", style=solid]; +"97 164" -> "98 165" [label="[1, 128, -1, -1]", style=solid]; +"98 165" -> "99 166" [label="[1, 128, -1, -1]", style=solid]; +"99 166" -> "100 168" [label="[1, 128, -1, -1]", style=solid]; +"100 168" -> "101 171" [label="[1, 512, -1, -1]", style=solid]; +"101 171" -> "102 172" [label="[1, 512, -1, -1]", style=solid]; +"102 172" -> "103 173" [label="[1, 512, -1, -1]", style=solid]; +"103 173" -> "104 174" [label="[1, 512, -1, -1]", style=solid]; +"104 174" -> "105 196" [label="[1, 512, -1, -1]", style=solid]; +"104 174" -> "108 176" [label="[1, 512, -1, -1]", style=solid]; +"104 174" -> "931 450" [label="[1, 512, -1, -1]", style=solid]; +"105 196" -> "106 199" [label="[1, 1024, -1, -1]", style=solid]; +"106 199" -> "107 200" [label="[1, 1024, -1, -1]", style=solid]; +"107 200" -> "119 201" [label="[1, 1024, -1, -1]", style=solid]; +"108 176" -> "109 179" [label="[1, 256, -1, -1]", style=solid]; +"109 179" -> "110 180" [label="[1, 256, -1, -1]", style=solid]; +"110 
180" -> "111 181" [label="[1, 256, -1, -1]", style=solid]; +"111 181" -> "112 183" [label="[1, 256, -1, -1]", style=solid]; +"112 183" -> "113 186" [label="[1, 256, -1, -1]", style=solid]; +"113 186" -> "114 187" [label="[1, 256, -1, -1]", style=solid]; +"114 187" -> "115 188" [label="[1, 256, -1, -1]", style=solid]; +"115 188" -> "116 190" [label="[1, 256, -1, -1]", style=solid]; +"116 190" -> "117 193" [label="[1, 1024, -1, -1]", style=solid]; +"117 193" -> "118 194" [label="[1, 1024, -1, -1]", style=solid]; +"118 194" -> "119 201" [label="[1, 1024, -1, -1]", style=solid]; +"119 201" -> "120 202" [label="[1, 1024, -1, -1]", style=solid]; +"120 202" -> "121 204" [label="[1, 1024, -1, -1]", style=solid]; +"120 202" -> "132 223" [label="[1, 1024, -1, -1]", style=solid]; +"121 204" -> "122 207" [label="[1, 256, -1, -1]", style=solid]; +"122 207" -> "123 208" [label="[1, 256, -1, -1]", style=solid]; +"123 208" -> "124 209" [label="[1, 256, -1, -1]", style=solid]; +"124 209" -> "125 211" [label="[1, 256, -1, -1]", style=solid]; +"125 211" -> "126 214" [label="[1, 256, -1, -1]", style=solid]; +"126 214" -> "127 215" [label="[1, 256, -1, -1]", style=solid]; +"127 215" -> "128 216" [label="[1, 256, -1, -1]", style=solid]; +"128 216" -> "129 218" [label="[1, 256, -1, -1]", style=solid]; +"129 218" -> "130 221" [label="[1, 1024, -1, -1]", style=solid]; +"130 221" -> "131 222" [label="[1, 1024, -1, -1]", style=solid]; +"131 222" -> "132 223" [label="[1, 1024, -1, -1]", style=solid]; +"132 223" -> "133 224" [label="[1, 1024, -1, -1]", style=solid]; +"133 224" -> "134 226" [label="[1, 1024, -1, -1]", style=solid]; +"133 224" -> "145 245" [label="[1, 1024, -1, -1]", style=solid]; +"134 226" -> "135 229" [label="[1, 256, -1, -1]", style=solid]; +"135 229" -> "136 230" [label="[1, 256, -1, -1]", style=solid]; +"136 230" -> "137 231" [label="[1, 256, -1, -1]", style=solid]; +"137 231" -> "138 233" [label="[1, 256, -1, -1]", style=solid]; +"138 233" -> "139 236" [label="[1, 256, -1, -1]", style=solid]; +"139 236" -> "140 237" [label="[1, 256, -1, -1]", style=solid]; +"140 237" -> "141 238" [label="[1, 256, -1, -1]", style=solid]; +"141 238" -> "142 240" [label="[1, 256, -1, -1]", style=solid]; +"142 240" -> "143 243" [label="[1, 1024, -1, -1]", style=solid]; +"143 243" -> "144 244" [label="[1, 1024, -1, -1]", style=solid]; +"144 244" -> "145 245" [label="[1, 1024, -1, -1]", style=solid]; +"145 245" -> "146 246" [label="[1, 1024, -1, -1]", style=solid]; +"146 246" -> "147 248" [label="[1, 1024, -1, -1]", style=solid]; +"146 246" -> "158 267" [label="[1, 1024, -1, -1]", style=solid]; +"147 248" -> "148 251" [label="[1, 256, -1, -1]", style=solid]; +"148 251" -> "149 252" [label="[1, 256, -1, -1]", style=solid]; +"149 252" -> "150 253" [label="[1, 256, -1, -1]", style=solid]; +"150 253" -> "151 255" [label="[1, 256, -1, -1]", style=solid]; +"151 255" -> "152 258" [label="[1, 256, -1, -1]", style=solid]; +"152 258" -> "153 259" [label="[1, 256, -1, -1]", style=solid]; +"153 259" -> "154 260" [label="[1, 256, -1, -1]", style=solid]; +"154 260" -> "155 262" [label="[1, 256, -1, -1]", style=solid]; +"155 262" -> "156 265" [label="[1, 1024, -1, -1]", style=solid]; +"156 265" -> "157 266" [label="[1, 1024, -1, -1]", style=solid]; +"157 266" -> "158 267" [label="[1, 1024, -1, -1]", style=solid]; +"158 267" -> "159 268" [label="[1, 1024, -1, -1]", style=solid]; +"159 268" -> "160 270" [label="[1, 1024, -1, -1]", style=solid]; +"159 268" -> "171 289" [label="[1, 1024, -1, -1]", style=solid]; +"160 270" -> "161 273" 
[label="[1, 256, -1, -1]", style=solid]; +"161 273" -> "162 274" [label="[1, 256, -1, -1]", style=solid]; +"162 274" -> "163 275" [label="[1, 256, -1, -1]", style=solid]; +"163 275" -> "164 277" [label="[1, 256, -1, -1]", style=solid]; +"164 277" -> "165 280" [label="[1, 256, -1, -1]", style=solid]; +"165 280" -> "166 281" [label="[1, 256, -1, -1]", style=solid]; +"166 281" -> "167 282" [label="[1, 256, -1, -1]", style=solid]; +"167 282" -> "168 284" [label="[1, 256, -1, -1]", style=solid]; +"168 284" -> "169 287" [label="[1, 1024, -1, -1]", style=solid]; +"169 287" -> "170 288" [label="[1, 1024, -1, -1]", style=solid]; +"170 288" -> "171 289" [label="[1, 1024, -1, -1]", style=solid]; +"171 289" -> "172 290" [label="[1, 1024, -1, -1]", style=solid]; +"172 290" -> "173 292" [label="[1, 1024, -1, -1]", style=solid]; +"172 290" -> "184 311" [label="[1, 1024, -1, -1]", style=solid]; +"173 292" -> "174 295" [label="[1, 256, -1, -1]", style=solid]; +"174 295" -> "175 296" [label="[1, 256, -1, -1]", style=solid]; +"175 296" -> "176 297" [label="[1, 256, -1, -1]", style=solid]; +"176 297" -> "177 299" [label="[1, 256, -1, -1]", style=solid]; +"177 299" -> "178 302" [label="[1, 256, -1, -1]", style=solid]; +"178 302" -> "179 303" [label="[1, 256, -1, -1]", style=solid]; +"179 303" -> "180 304" [label="[1, 256, -1, -1]", style=solid]; +"180 304" -> "181 306" [label="[1, 256, -1, -1]", style=solid]; +"181 306" -> "182 309" [label="[1, 1024, -1, -1]", style=solid]; +"182 309" -> "183 310" [label="[1, 1024, -1, -1]", style=solid]; +"183 310" -> "184 311" [label="[1, 1024, -1, -1]", style=solid]; +"184 311" -> "185 312" [label="[1, 1024, -1, -1]", style=solid]; +"185 312" -> "186 334" [label="[1, 1024, -1, -1]", style=solid]; +"185 312" -> "189 314" [label="[1, 1024, -1, -1]", style=solid]; +"185 312" -> "685 418" [label="[1, 1024, -1, -1]", style=solid]; +"186 334" -> "187 337" [label="[1, 2048, -1, -1]", style=solid]; +"187 337" -> "188 338" [label="[1, 2048, -1, -1]", style=solid]; +"188 338" -> "200 339" [label="[1, 2048, -1, -1]", style=solid]; +"189 314" -> "190 317" [label="[1, 512, -1, -1]", style=solid]; +"190 317" -> "191 318" [label="[1, 512, -1, -1]", style=solid]; +"191 318" -> "192 319" [label="[1, 512, -1, -1]", style=solid]; +"192 319" -> "193 321" [label="[1, 512, -1, -1]", style=solid]; +"193 321" -> "194 324" [label="[1, 512, -1, -1]", style=solid]; +"194 324" -> "195 325" [label="[1, 512, -1, -1]", style=solid]; +"195 325" -> "196 326" [label="[1, 512, -1, -1]", style=solid]; +"196 326" -> "197 328" [label="[1, 512, -1, -1]", style=solid]; +"197 328" -> "198 331" [label="[1, 2048, -1, -1]", style=solid]; +"198 331" -> "199 332" [label="[1, 2048, -1, -1]", style=solid]; +"199 332" -> "200 339" [label="[1, 2048, -1, -1]", style=solid]; +"200 339" -> "201 340" [label="[1, 2048, -1, -1]", style=solid]; +"201 340" -> "202 342" [label="[1, 2048, -1, -1]", style=solid]; +"201 340" -> "213 361" [label="[1, 2048, -1, -1]", style=solid]; +"202 342" -> "203 345" [label="[1, 512, -1, -1]", style=solid]; +"203 345" -> "204 346" [label="[1, 512, -1, -1]", style=solid]; +"204 346" -> "205 347" [label="[1, 512, -1, -1]", style=solid]; +"205 347" -> "206 349" [label="[1, 512, -1, -1]", style=solid]; +"206 349" -> "207 352" [label="[1, 512, -1, -1]", style=solid]; +"207 352" -> "208 353" [label="[1, 512, -1, -1]", style=solid]; +"208 353" -> "209 354" [label="[1, 512, -1, -1]", style=solid]; +"209 354" -> "210 356" [label="[1, 512, -1, -1]", style=solid]; +"210 356" -> "211 359" [label="[1, 2048, -1, 
-1]", style=solid]; +"211 359" -> "212 360" [label="[1, 2048, -1, -1]", style=solid]; +"212 360" -> "213 361" [label="[1, 2048, -1, -1]", style=solid]; +"213 361" -> "214 362" [label="[1, 2048, -1, -1]", style=solid]; +"214 362" -> "215 364" [label="[1, 2048, -1, -1]", style=solid]; +"214 362" -> "226 383" [label="[1, 2048, -1, -1]", style=solid]; +"215 364" -> "216 367" [label="[1, 512, -1, -1]", style=solid]; +"216 367" -> "217 368" [label="[1, 512, -1, -1]", style=solid]; +"217 368" -> "218 369" [label="[1, 512, -1, -1]", style=solid]; +"218 369" -> "219 371" [label="[1, 512, -1, -1]", style=solid]; +"219 371" -> "220 374" [label="[1, 512, -1, -1]", style=solid]; +"220 374" -> "221 375" [label="[1, 512, -1, -1]", style=solid]; +"221 375" -> "222 376" [label="[1, 512, -1, -1]", style=solid]; +"222 376" -> "223 378" [label="[1, 512, -1, -1]", style=solid]; +"223 378" -> "224 381" [label="[1, 2048, -1, -1]", style=solid]; +"224 381" -> "225 382" [label="[1, 2048, -1, -1]", style=solid]; +"225 382" -> "226 383" [label="[1, 2048, -1, -1]", style=solid]; +"226 383" -> "227 384" [label="[1, 2048, -1, -1]", style=solid]; +"227 384" -> "228 387" [label="[1, 2048, -1, -1]", style=solid]; +"228 387" -> "229 390" [label="[1, 256, -1, -1]", style=solid]; +"228 387" -> "686 407" [label="[1, 256, -1, -1]", style=solid]; +"228 387" -> "689 398" [label="[1, 256, -1, -1]", style=solid]; +"228 387" -> "693 392" [label="[1, 256, -1, -1]", style=solid]; +"228 387" -> "701 415" [label="[1, 256, -1, -1]", style=solid]; +"229 390" -> "230 487" [label="[1, 256, -1, -1]", style=solid]; +"229 390" -> "458 506" [label="[1, 256, -1, -1]", style=solid]; +"229 390" -> "518 536" [label="[1, 256, -1, -1]", style=solid]; +"229 390" -> "521 533" [label="[1, 256, -1, -1]", style=solid]; +"229 390" -> "1597 2620" [label="[1, 256, -1, -1]", style=solid]; +"229 390" -> "3502 6664" [label="[1, 256, -1, -1]", style=solid]; +"230 487" -> "231 510" [label="[1, 256, -1, -1]", style=solid]; +"230 487" -> "291 542" [label="[1, 256, -1, -1]", style=solid]; +"230 487" -> "294 539" [label="[1, 256, -1, -1]", style=solid]; +"231 510" -> "232 511" [label="[1, 256, -1, -1]", style=solid]; +"232 511" -> "233 512" [label="[1, 256, -1, -1]", style=solid]; +"232 511" -> "280 513" [label="[1, 256, -1, -1]", style=solid]; +"233 512" -> "234 2154" [label="[1, 3, -1, -1]", style=solid]; +"233 512" -> "245 2160" [label="[1, 3, -1, -1]", style=solid]; +"233 512" -> "248 2157" [label="[1, 3, -1, -1]", style=solid]; +"233 512" -> "255 2170" [label="[1, 3, -1, -1]", style=solid]; "234 2154" -> "235 2155" [label="[4]", style=dashed]; "235 2155" -> "236 2394" [label="[]", style=dashed]; "235 2155" -> "239 2180" [label="[]", style=dashed]; @@ -4187,13 +4187,13 @@ strict digraph { "252 2165" -> "254 2169" [label="[1]", style=dashed]; "253 2164" -> "254 2169" [label="[1]", style=dashed]; "254 2169" -> "255 2170" [label="[5]", style=dashed]; -"255 2170" -> "256 2171" [label="[1]", style=solid]; -"256 2171" -> "257 2178" [label="[1]", style=solid]; -"257 2178" -> "258 2183" [label="[1]", style=solid]; -"258 2183" -> "259 2184" [label="[1]", style=solid]; -"259 2184" -> "260 2202" [label="[1]", style=solid]; -"259 2184" -> "268 2212" [label="[1]", style=solid]; -"260 2202" -> "261 2204" [label="[1]", style=dashed]; +"255 2170" -> "256 2171" [label="[]", style=solid]; +"256 2171" -> "257 2178" [label="[]", style=solid]; +"257 2178" -> "258 2183" [label="[]", style=solid]; +"258 2183" -> "259 2184" [label="[]", style=solid]; +"259 2184" -> "260 2202" 
[label="[]", style=solid]; +"259 2184" -> "268 2212" [label="[]", style=solid]; +"260 2202" -> "261 2204" [label="[-1]", style=dashed]; "261 2204" -> "262 2205" [label="[]", style=dashed]; "262 2205" -> "263 2207" [label="[1]", style=dashed]; "263 2207" -> "264 2208" [label="[2]", style=dashed]; @@ -4201,10 +4201,10 @@ strict digraph { "265 2209" -> "266 2210" [label="[]", style=dashed]; "266 2210" -> "267 2211" [label="[]", style=dashed]; "267 2211" -> "268 2212" [label="[1]", style=dashed]; -"268 2212" -> "269 2213" [label="[1]", style=dashed]; -"268 2212" -> "444 2402" [label="[1]", style=solid]; -"269 2213" -> "284 2214" [label="[1]", style=dashed]; -"269 2213" -> "345 2223" [label="[1]", style=dashed]; +"268 2212" -> "269 2213" [label="[]", style=dashed]; +"268 2212" -> "444 2402" [label="[]", style=solid]; +"269 2213" -> "284 2214" [label="[]", style=dashed]; +"269 2213" -> "345 2223" [label="[]", style=dashed]; "270 2199" -> "273 2200" [label="[1]", style=dashed]; "271 2198" -> "273 2200" [label="[1]", style=dashed]; "272 2197" -> "273 2200" [label="[1]", style=dashed]; @@ -4215,16 +4215,16 @@ strict digraph { "277 2188" -> "279 2192" [label="[1]", style=dashed]; "278 2187" -> "279 2192" [label="[1]", style=dashed]; "279 2192" -> "281 2193" [label="[5]", style=dashed]; -"280 513" -> "281 2193" [label="[1]", style=solid]; -"281 2193" -> "282 2194" [label="[1]", style=solid]; -"282 2194" -> "283 2201" [label="[1]", style=solid]; -"283 2201" -> "284 2214" [label="[1]", style=solid]; -"284 2214" -> "285 2225" [label="[1]", style=solid]; -"285 2225" -> "286 2390" [label="[1]", style=solid]; -"285 2225" -> "356 2317" [label="[1]", style=solid]; -"285 2225" -> "370 2297" [label="[1]", style=solid]; -"285 2225" -> "385 2307" [label="[1]", style=solid]; -"285 2225" -> "399 2287" [label="[1]", style=solid]; +"280 513" -> "281 2193" [label="[1, 12, -1, -1]", style=solid]; +"281 2193" -> "282 2194" [label="[]", style=solid]; +"282 2194" -> "283 2201" [label="[]", style=solid]; +"283 2201" -> "284 2214" [label="[]", style=solid]; +"284 2214" -> "285 2225" [label="[]", style=solid]; +"285 2225" -> "286 2390" [label="[-1, 4]", style=solid]; +"285 2225" -> "356 2317" [label="[-1, 4]", style=solid]; +"285 2225" -> "370 2297" [label="[-1, 4]", style=solid]; +"285 2225" -> "385 2307" [label="[-1, 4]", style=solid]; +"285 2225" -> "399 2287" [label="[-1, 4]", style=solid]; "286 2390" -> "413 2391" [label="[2]", style=dashed]; "287 2220" -> "290 2221" [label="[1]", style=dashed]; "288 2219" -> "290 2221" [label="[1]", style=dashed]; @@ -4249,19 +4249,19 @@ strict digraph { "301 806" -> "302 807" [label="[]", style=dashed]; "302 807" -> "303 808" [label="[1]", style=dashed]; "303 808" -> "304 809" [label="[1]", style=dashed]; -"304 809" -> "305 810" [label="[1]", style=solid]; -"305 810" -> "306 811" [label="[1]", style=solid]; -"306 811" -> "307 812" [label="[1]", style=dashed]; -"307 812" -> "308 817" [label="[1]", style=dashed]; -"308 817" -> "309 818" [label="[1]", style=dashed]; -"309 818" -> "310 820" [label="[1]", style=solid]; -"310 820" -> "311 821" [label="[1]", style=solid]; -"311 821" -> "312 823" [label="[1]", style=solid]; -"312 823" -> "313 827" [label="[1]", style=solid]; -"313 827" -> "314 837" [label="[1]", style=solid]; -"314 837" -> "315 841" [label="[1]", style=solid]; -"314 837" -> "337 839" [label="[1]", style=solid]; -"315 841" -> "339 842" [label="[1]", style=solid]; +"304 809" -> "305 810" [label="[-1]", style=solid]; +"305 810" -> "306 811" [label="[-1]", style=solid]; +"306 
811" -> "307 812" [label="[1, -1]", style=dashed]; +"307 812" -> "308 817" [label="[-1, 1]", style=dashed]; +"308 817" -> "309 818" [label="[]", style=dashed]; +"309 818" -> "310 820" [label="[]", style=solid]; +"310 820" -> "311 821" [label="[]", style=solid]; +"311 821" -> "312 823" [label="[]", style=solid]; +"312 823" -> "313 827" [label="[-1, 1]", style=solid]; +"313 827" -> "314 837" [label="[-1, -1]", style=solid]; +"314 837" -> "315 841" [label="[-1]", style=solid]; +"314 837" -> "337 839" [label="[-1]", style=solid]; +"315 841" -> "339 842" [label="[-1, 1]", style=solid]; "316 831" -> "318 832" [label="[1]", style=dashed]; "317 830" -> "318 832" [label="[1]", style=dashed]; "318 832" -> "334 833" [label="[2]", style=dashed]; @@ -4271,169 +4271,169 @@ strict digraph { "322 789" -> "323 790" [label="[]", style=dashed]; "323 790" -> "324 791" [label="[1]", style=dashed]; "324 791" -> "325 792" [label="[1]", style=dashed]; -"325 792" -> "326 793" [label="[1]", style=solid]; -"326 793" -> "327 794" [label="[1]", style=solid]; -"327 794" -> "328 795" [label="[1]", style=dashed]; -"328 795" -> "329 800" [label="[1]", style=dashed]; -"329 800" -> "330 801" [label="[1]", style=dashed]; -"330 801" -> "331 803" [label="[1]", style=solid]; -"331 803" -> "332 804" [label="[1]", style=solid]; -"332 804" -> "333 829" [label="[1]", style=solid]; -"333 829" -> "334 833" [label="[1]", style=solid]; -"334 833" -> "335 835" [label="[1]", style=solid]; -"335 835" -> "336 840" [label="[1]", style=solid]; -"335 835" -> "338 838" [label="[1]", style=solid]; -"336 840" -> "339 842" [label="[1]", style=solid]; -"337 839" -> "339 842" [label="[1]", style=solid]; -"338 838" -> "339 842" [label="[1]", style=solid]; -"339 842" -> "340 844" [label="[1]", style=solid]; -"340 844" -> "341 846" [label="[1]", style=solid]; -"341 846" -> "342 848" [label="[1]", style=solid]; -"342 848" -> "343 2215" [label="[1]", style=solid]; -"343 2215" -> "344 2222" [label="[1]", style=solid]; -"344 2222" -> "345 2223" [label="[1]", style=solid]; -"345 2223" -> "346 2227" [label="[1]", style=solid]; -"346 2227" -> "347 2228" [label="[1]", style=solid]; -"347 2228" -> "348 2257" [label="[1]", style=solid]; -"347 2228" -> "350 2250" [label="[1]", style=solid]; -"347 2228" -> "363 2277" [label="[1]", style=solid]; -"347 2228" -> "377 2240" [label="[1]", style=solid]; -"347 2228" -> "379 2233" [label="[1]", style=solid]; -"347 2228" -> "392 2267" [label="[1]", style=solid]; -"348 2257" -> "349 2259" [label="[1]", style=solid]; -"349 2259" -> "352 2260" [label="[1]", style=solid]; -"350 2250" -> "351 2252" [label="[1]", style=solid]; -"351 2252" -> "352 2260" [label="[1]", style=solid]; -"352 2260" -> "353 2262" [label="[1]", style=solid]; -"353 2262" -> "354 2366" [label="[1]", style=solid]; -"353 2262" -> "362 2281" [label="[1]", style=solid]; -"353 2262" -> "368 2343" [label="[1]", style=solid]; -"354 2366" -> "355 2367" [label="[1]", style=solid]; -"355 2367" -> "360 2368" [label="[1]", style=solid]; -"356 2317" -> "357 2322" [label="[1]", style=solid]; -"357 2322" -> "358 2324" [label="[1]", style=solid]; -"358 2324" -> "359 2361" [label="[1]", style=solid]; -"359 2361" -> "360 2368" [label="[1]", style=solid]; -"360 2368" -> "361 2381" [label="[1]", style=solid]; -"360 2368" -> "406 2373" [label="[1]", style=solid]; -"361 2381" -> "374 2382" [label="[1]", style=solid]; -"362 2281" -> "365 2282" [label="[1]", style=solid]; -"363 2277" -> "364 2279" [label="[1]", style=solid]; -"364 2279" -> "365 2282" [label="[1]", style=solid]; 
-"365 2282" -> "366 2350" [label="[1]", style=solid]; -"366 2350" -> "367 2351" [label="[1]", style=solid]; -"367 2351" -> "373 2352" [label="[1]", style=solid]; -"368 2343" -> "369 2344" [label="[1]", style=solid]; -"369 2344" -> "372 2345" [label="[1]", style=solid]; -"370 2297" -> "371 2302" [label="[1]", style=solid]; -"371 2302" -> "372 2345" [label="[1]", style=solid]; -"372 2345" -> "373 2352" [label="[1]", style=solid]; -"373 2352" -> "374 2382" [label="[1]", style=solid]; -"373 2352" -> "407 2374" [label="[1]", style=solid]; -"374 2382" -> "375 2384" [label="[1]", style=solid]; -"375 2384" -> "376 2388" [label="[1]", style=solid]; -"376 2388" -> "412 2389" [label="[1]", style=solid]; -"377 2240" -> "378 2242" [label="[1]", style=solid]; -"378 2242" -> "381 2243" [label="[1]", style=solid]; -"379 2233" -> "380 2235" [label="[1]", style=solid]; -"380 2235" -> "381 2243" [label="[1]", style=solid]; -"381 2243" -> "382 2245" [label="[1]", style=solid]; -"382 2245" -> "383 2358" [label="[1]", style=solid]; -"382 2245" -> "391 2271" [label="[1]", style=solid]; -"382 2245" -> "397 2329" [label="[1]", style=solid]; -"383 2358" -> "384 2359" [label="[1]", style=solid]; -"384 2359" -> "389 2360" [label="[1]", style=solid]; -"385 2307" -> "386 2312" [label="[1]", style=solid]; -"386 2312" -> "387 2323" [label="[1]", style=solid]; -"387 2323" -> "388 2353" [label="[1]", style=solid]; -"388 2353" -> "389 2360" [label="[1]", style=solid]; -"389 2360" -> "390 2376" [label="[1]", style=solid]; -"389 2360" -> "409 2370" [label="[1]", style=solid]; -"390 2376" -> "403 2377" [label="[1]", style=solid]; -"391 2271" -> "394 2272" [label="[1]", style=solid]; -"392 2267" -> "393 2269" [label="[1]", style=solid]; -"393 2269" -> "394 2272" [label="[1]", style=solid]; -"394 2272" -> "395 2336" [label="[1]", style=solid]; -"395 2336" -> "396 2337" [label="[1]", style=solid]; -"396 2337" -> "402 2338" [label="[1]", style=solid]; -"397 2329" -> "398 2330" [label="[1]", style=solid]; -"398 2330" -> "401 2331" [label="[1]", style=solid]; -"399 2287" -> "400 2292" [label="[1]", style=solid]; -"400 2292" -> "401 2331" [label="[1]", style=solid]; -"401 2331" -> "402 2338" [label="[1]", style=solid]; -"402 2338" -> "403 2377" [label="[1]", style=solid]; -"402 2338" -> "410 2371" [label="[1]", style=solid]; -"403 2377" -> "404 2379" [label="[1]", style=solid]; -"404 2379" -> "405 2387" [label="[1]", style=solid]; -"405 2387" -> "412 2389" [label="[1]", style=solid]; -"406 2373" -> "407 2374" [label="[1]", style=solid]; -"407 2374" -> "408 2386" [label="[1]", style=solid]; -"408 2386" -> "412 2389" [label="[1]", style=solid]; -"409 2370" -> "410 2371" [label="[1]", style=solid]; -"410 2371" -> "411 2385" [label="[1]", style=solid]; -"411 2385" -> "412 2389" [label="[1]", style=solid]; -"412 2389" -> "413 2391" [label="[1]", style=solid]; -"413 2391" -> "414 2398" [label="[1]", style=solid]; -"414 2398" -> "415 2400" [label="[1]", style=solid]; -"415 2400" -> "416 2418" [label="[1]", style=solid]; -"415 2400" -> "420 2407" [label="[1]", style=solid]; -"416 2418" -> "417 2423" [label="[1]", style=solid]; -"417 2423" -> "418 2424" [label="[1]", style=solid]; -"418 2424" -> "419 2426" [label="[1]", style=solid]; -"419 2426" -> "424 2427" [label="[1]", style=solid]; -"420 2407" -> "421 2412" [label="[1]", style=solid]; -"421 2412" -> "422 2413" [label="[1]", style=solid]; -"422 2413" -> "423 2425" [label="[1]", style=solid]; -"423 2425" -> "424 2427" [label="[1]", style=solid]; -"424 2427" -> "425 2429" [label="[1]", 
style=solid]; -"425 2429" -> "426 2430" [label="[1]", style=solid]; -"425 2429" -> "449 2458" [label="[1]", style=solid]; -"426 2430" -> "427 2434" [label="[1]", style=solid]; -"426 2430" -> "429 2431" [label="[1]", style=solid]; -"426 2430" -> "431 2437" [label="[1]", style=solid]; -"427 2434" -> "428 2436" [label="[1]", style=solid]; -"428 2436" -> "431 2437" [label="[1]", style=solid]; -"429 2431" -> "430 2433" [label="[1]", style=solid]; -"430 2433" -> "431 2437" [label="[1]", style=solid]; -"431 2437" -> "432 2438" [label="[1]", style=solid]; -"432 2438" -> "433 2440" [label="[1]", style=solid]; -"432 2438" -> "436 2439" [label="[1]", style=solid]; -"433 2440" -> "434 2446" [label="[1]", style=solid]; -"434 2446" -> "435 2447" [label="[1]", style=dashed]; -"435 2447" -> "439 2451" [label="[1]", style=dashed]; -"436 2439" -> "437 2442" [label="[1]", style=solid]; -"437 2442" -> "438 2443" [label="[1]", style=dashed]; -"438 2443" -> "439 2451" [label="[1]", style=dashed]; -"439 2451" -> "440 2453" [label="[1]", style=dashed]; -"440 2453" -> "441 2454" [label="[1]", style=solid]; -"441 2454" -> "442 2455" [label="[1]", style=dashed]; -"442 2455" -> "443 2456" [label="[1]", style=dashed]; -"443 2456" -> "445 2460" [label="[1]", style=dashed]; -"443 2456" -> "448 2457" [label="[1]", style=dashed]; -"444 2402" -> "445 2460" [label="[1]", style=solid]; -"445 2460" -> "446 2462" [label="[1]", style=solid]; -"445 2460" -> "455 2478" [label="[1]", style=solid]; -"446 2462" -> "447 2463" [label="[1]", style=solid]; -"447 2463" -> "451 2466" [label="[1]", style=solid]; -"448 2457" -> "449 2458" [label="[1]", style=dashed]; -"449 2458" -> "450 2461" [label="[1]", style=solid]; -"449 2458" -> "1431 2476" [label="[1]", style=solid]; -"450 2461" -> "451 2466" [label="[1]", style=solid]; -"451 2466" -> "452 2468" [label="[1]", style=dashed]; -"452 2468" -> "453 2469" [label="[1]", style=dashed]; -"453 2469" -> "454 2474" [label="[1]", style=dashed]; -"454 2474" -> "455 2478" [label="[1]", style=dashed]; -"454 2474" -> "1430 2475" [label="[1]", style=dashed]; -"455 2478" -> "1421 2480" [label="[1]", style=solid]; +"325 792" -> "326 793" [label="[-1]", style=solid]; +"326 793" -> "327 794" [label="[-1]", style=solid]; +"327 794" -> "328 795" [label="[1, -1]", style=dashed]; +"328 795" -> "329 800" [label="[-1, 1]", style=dashed]; +"329 800" -> "330 801" [label="[]", style=dashed]; +"330 801" -> "331 803" [label="[]", style=solid]; +"331 803" -> "332 804" [label="[]", style=solid]; +"332 804" -> "333 829" [label="[]", style=solid]; +"333 829" -> "334 833" [label="[1, -1]", style=solid]; +"334 833" -> "335 835" [label="[-1, -1]", style=solid]; +"335 835" -> "336 840" [label="[-1]", style=solid]; +"335 835" -> "338 838" [label="[-1]", style=solid]; +"336 840" -> "339 842" [label="[-1, 1]", style=solid]; +"337 839" -> "339 842" [label="[-1, 1]", style=solid]; +"338 838" -> "339 842" [label="[-1, 1]", style=solid]; +"339 842" -> "340 844" [label="[-1, 4]", style=solid]; +"340 844" -> "341 846" [label="[-1, 1, 4]", style=solid]; +"341 846" -> "342 848" [label="[-1, 3, 4]", style=solid]; +"342 848" -> "343 2215" [label="[-1, 4]", style=solid]; +"343 2215" -> "344 2222" [label="[-1, 4]", style=solid]; +"344 2222" -> "345 2223" [label="[]", style=solid]; +"345 2223" -> "346 2227" [label="[]", style=solid]; +"346 2227" -> "347 2228" [label="[-1, 4]", style=solid]; +"347 2228" -> "348 2257" [label="[-1, 4]", style=solid]; +"347 2228" -> "350 2250" [label="[-1, 4]", style=solid]; +"347 2228" -> "363 2277" 
[label="[-1, 4]", style=solid]; +"347 2228" -> "377 2240" [label="[-1, 4]", style=solid]; +"347 2228" -> "379 2233" [label="[-1, 4]", style=solid]; +"347 2228" -> "392 2267" [label="[-1, 4]", style=solid]; +"348 2257" -> "349 2259" [label="[-1, 4]", style=solid]; +"349 2259" -> "352 2260" [label="[-1]", style=solid]; +"350 2250" -> "351 2252" [label="[-1, 4]", style=solid]; +"351 2252" -> "352 2260" [label="[-1]", style=solid]; +"352 2260" -> "353 2262" [label="[-1]", style=solid]; +"353 2262" -> "354 2366" [label="[-1]", style=solid]; +"353 2262" -> "362 2281" [label="[-1]", style=solid]; +"353 2262" -> "368 2343" [label="[-1]", style=solid]; +"354 2366" -> "355 2367" [label="[-1]", style=solid]; +"355 2367" -> "360 2368" [label="[-1, 1]", style=solid]; +"356 2317" -> "357 2322" [label="[-1, 4]", style=solid]; +"357 2322" -> "358 2324" [label="[-1, 1]", style=solid]; +"358 2324" -> "359 2361" [label="[-1, 1]", style=solid]; +"359 2361" -> "360 2368" [label="[-1, 1]", style=solid]; +"360 2368" -> "361 2381" [label="[-1, 1]", style=solid]; +"360 2368" -> "406 2373" [label="[-1, 1]", style=solid]; +"361 2381" -> "374 2382" [label="[-1, 1]", style=solid]; +"362 2281" -> "365 2282" [label="[-1]", style=solid]; +"363 2277" -> "364 2279" [label="[-1, 4]", style=solid]; +"364 2279" -> "365 2282" [label="[-1]", style=solid]; +"365 2282" -> "366 2350" [label="[-1]", style=solid]; +"366 2350" -> "367 2351" [label="[-1]", style=solid]; +"367 2351" -> "373 2352" [label="[-1, 1]", style=solid]; +"368 2343" -> "369 2344" [label="[-1]", style=solid]; +"369 2344" -> "372 2345" [label="[-1, 1]", style=solid]; +"370 2297" -> "371 2302" [label="[-1, 4]", style=solid]; +"371 2302" -> "372 2345" [label="[-1, 1]", style=solid]; +"372 2345" -> "373 2352" [label="[-1, 1]", style=solid]; +"373 2352" -> "374 2382" [label="[-1, 1]", style=solid]; +"373 2352" -> "407 2374" [label="[-1, 1]", style=solid]; +"374 2382" -> "375 2384" [label="[-1, 1]", style=solid]; +"375 2384" -> "376 2388" [label="[-1, 1]", style=solid]; +"376 2388" -> "412 2389" [label="[-1, 1, 1]", style=solid]; +"377 2240" -> "378 2242" [label="[-1, 4]", style=solid]; +"378 2242" -> "381 2243" [label="[-1]", style=solid]; +"379 2233" -> "380 2235" [label="[-1, 4]", style=solid]; +"380 2235" -> "381 2243" [label="[-1]", style=solid]; +"381 2243" -> "382 2245" [label="[-1]", style=solid]; +"382 2245" -> "383 2358" [label="[-1]", style=solid]; +"382 2245" -> "391 2271" [label="[-1]", style=solid]; +"382 2245" -> "397 2329" [label="[-1]", style=solid]; +"383 2358" -> "384 2359" [label="[-1]", style=solid]; +"384 2359" -> "389 2360" [label="[-1, 1]", style=solid]; +"385 2307" -> "386 2312" [label="[-1, 4]", style=solid]; +"386 2312" -> "387 2323" [label="[-1, 1]", style=solid]; +"387 2323" -> "388 2353" [label="[-1, 1]", style=solid]; +"388 2353" -> "389 2360" [label="[-1, 1]", style=solid]; +"389 2360" -> "390 2376" [label="[-1, 1]", style=solid]; +"389 2360" -> "409 2370" [label="[-1, 1]", style=solid]; +"390 2376" -> "403 2377" [label="[-1, 1]", style=solid]; +"391 2271" -> "394 2272" [label="[-1]", style=solid]; +"392 2267" -> "393 2269" [label="[-1, 4]", style=solid]; +"393 2269" -> "394 2272" [label="[-1]", style=solid]; +"394 2272" -> "395 2336" [label="[-1]", style=solid]; +"395 2336" -> "396 2337" [label="[-1]", style=solid]; +"396 2337" -> "402 2338" [label="[-1, 1]", style=solid]; +"397 2329" -> "398 2330" [label="[-1]", style=solid]; +"398 2330" -> "401 2331" [label="[-1, 1]", style=solid]; +"399 2287" -> "400 2292" [label="[-1, 4]", 
style=solid]; +"400 2292" -> "401 2331" [label="[-1, 1]", style=solid]; +"401 2331" -> "402 2338" [label="[-1, 1]", style=solid]; +"402 2338" -> "403 2377" [label="[-1, 1]", style=solid]; +"402 2338" -> "410 2371" [label="[-1, 1]", style=solid]; +"403 2377" -> "404 2379" [label="[-1, 1]", style=solid]; +"404 2379" -> "405 2387" [label="[-1, 1]", style=solid]; +"405 2387" -> "412 2389" [label="[-1, 1, 1]", style=solid]; +"406 2373" -> "407 2374" [label="[-1, 1]", style=solid]; +"407 2374" -> "408 2386" [label="[-1, 1]", style=solid]; +"408 2386" -> "412 2389" [label="[-1, 1, 1]", style=solid]; +"409 2370" -> "410 2371" [label="[-1, 1]", style=solid]; +"410 2371" -> "411 2385" [label="[-1, 1]", style=solid]; +"411 2385" -> "412 2389" [label="[-1, 1, 1]", style=solid]; +"412 2389" -> "413 2391" [label="[-1, 1, 4]", style=solid]; +"413 2391" -> "414 2398" [label="[]", style=solid]; +"414 2398" -> "415 2400" [label="[]", style=solid]; +"415 2400" -> "416 2418" [label="[]", style=solid]; +"415 2400" -> "420 2407" [label="[]", style=solid]; +"416 2418" -> "417 2423" [label="[]", style=solid]; +"417 2423" -> "418 2424" [label="[]", style=solid]; +"418 2424" -> "419 2426" [label="[]", style=solid]; +"419 2426" -> "424 2427" [label="[]", style=solid]; +"420 2407" -> "421 2412" [label="[]", style=solid]; +"421 2412" -> "422 2413" [label="[]", style=solid]; +"422 2413" -> "423 2425" [label="[]", style=solid]; +"423 2425" -> "424 2427" [label="[]", style=solid]; +"424 2427" -> "425 2429" [label="[]", style=solid]; +"425 2429" -> "426 2430" [label="[-1, 4]", style=solid]; +"425 2429" -> "449 2458" [label="[-1, 4]", style=solid]; +"426 2430" -> "427 2434" [label="[-1, 1]", style=solid]; +"426 2430" -> "429 2431" [label="[-1, 1]", style=solid]; +"426 2430" -> "431 2437" [label="[-1, 1]", style=solid]; +"427 2434" -> "428 2436" [label="[-1, 1]", style=solid]; +"428 2436" -> "431 2437" [label="[-1, 1]", style=solid]; +"429 2431" -> "430 2433" [label="[-1, 1]", style=solid]; +"430 2433" -> "431 2437" [label="[-1, 1]", style=solid]; +"431 2437" -> "432 2438" [label="[-1, 4]", style=solid]; +"432 2438" -> "433 2440" [label="[-1, 1]", style=solid]; +"432 2438" -> "436 2439" [label="[-1, 1]", style=solid]; +"433 2440" -> "434 2446" [label="[-1]", style=solid]; +"434 2446" -> "435 2447" [label="[-1]", style=dashed]; +"435 2447" -> "439 2451" [label="[-1]", style=dashed]; +"436 2439" -> "437 2442" [label="[-1]", style=solid]; +"437 2442" -> "438 2443" [label="[-1]", style=dashed]; +"438 2443" -> "439 2451" [label="[-1]", style=dashed]; +"439 2451" -> "440 2453" [label="[-1]", style=dashed]; +"440 2453" -> "441 2454" [label="[-1]", style=solid]; +"441 2454" -> "442 2455" [label="[1, -1]", style=dashed]; +"442 2455" -> "443 2456" [label="[-1, 1]", style=dashed]; +"443 2456" -> "445 2460" [label="[-1]", style=dashed]; +"443 2456" -> "448 2457" [label="[-1]", style=dashed]; +"444 2402" -> "445 2460" [label="[]", style=solid]; +"445 2460" -> "446 2462" [label="[]", style=solid]; +"445 2460" -> "455 2478" [label="[]", style=solid]; +"446 2462" -> "447 2463" [label="[]", style=solid]; +"447 2463" -> "451 2466" [label="[]", style=solid]; +"448 2457" -> "449 2458" [label="[-1]", style=dashed]; +"449 2458" -> "450 2461" [label="[-1, 4]", style=solid]; +"449 2458" -> "1431 2476" [label="[-1, 4]", style=solid]; +"450 2461" -> "451 2466" [label="[1, -1, 4]", style=solid]; +"451 2466" -> "452 2468" [label="[-1, 3]", style=dashed]; +"452 2468" -> "453 2469" [label="[-1, 1]", style=dashed]; +"453 2469" -> "454 2474" 
[label="[-1]", style=dashed]; +"454 2474" -> "455 2478" [label="[-1]", style=dashed]; +"454 2474" -> "1430 2475" [label="[-1]", style=dashed]; +"455 2478" -> "1421 2480" [label="[]", style=solid]; "456 2070" -> "464 2071" [label="[1]", style=dashed]; "457 2069" -> "464 2071" [label="[1]", style=dashed]; -"458 506" -> "459 507" [label="[1]", style=solid]; -"459 507" -> "460 508" [label="[1]", style=solid]; -"459 507" -> "507 509" [label="[1]", style=solid]; -"460 508" -> "461 1828" [label="[1]", style=solid]; -"460 508" -> "472 1834" [label="[1]", style=solid]; -"460 508" -> "475 1831" [label="[1]", style=solid]; -"460 508" -> "482 1844" [label="[1]", style=solid]; +"458 506" -> "459 507" [label="[1, 256, -1, -1]", style=solid]; +"459 507" -> "460 508" [label="[1, 256, -1, -1]", style=solid]; +"459 507" -> "507 509" [label="[1, 256, -1, -1]", style=solid]; +"460 508" -> "461 1828" [label="[1, 3, -1, -1]", style=solid]; +"460 508" -> "472 1834" [label="[1, 3, -1, -1]", style=solid]; +"460 508" -> "475 1831" [label="[1, 3, -1, -1]", style=solid]; +"460 508" -> "482 1844" [label="[1, 3, -1, -1]", style=solid]; "461 1828" -> "462 1829" [label="[4]", style=dashed]; "462 1829" -> "463 2068" [label="[]", style=dashed]; "462 1829" -> "466 1854" [label="[]", style=dashed]; @@ -4463,13 +4463,13 @@ strict digraph { "479 1839" -> "481 1843" [label="[1]", style=dashed]; "480 1838" -> "481 1843" [label="[1]", style=dashed]; "481 1843" -> "482 1844" [label="[5]", style=dashed]; -"482 1844" -> "483 1845" [label="[1]", style=solid]; -"483 1845" -> "484 1852" [label="[1]", style=solid]; -"484 1852" -> "485 1857" [label="[1]", style=solid]; -"485 1857" -> "486 1858" [label="[1]", style=solid]; -"486 1858" -> "487 1876" [label="[1]", style=solid]; -"486 1858" -> "495 1886" [label="[1]", style=solid]; -"487 1876" -> "488 1878" [label="[1]", style=dashed]; +"482 1844" -> "483 1845" [label="[]", style=solid]; +"483 1845" -> "484 1852" [label="[]", style=solid]; +"484 1852" -> "485 1857" [label="[]", style=solid]; +"485 1857" -> "486 1858" [label="[]", style=solid]; +"486 1858" -> "487 1876" [label="[]", style=solid]; +"486 1858" -> "495 1886" [label="[]", style=solid]; +"487 1876" -> "488 1878" [label="[-1]", style=dashed]; "488 1878" -> "489 1879" [label="[]", style=dashed]; "489 1879" -> "490 1881" [label="[1]", style=dashed]; "490 1881" -> "491 1882" [label="[2]", style=dashed]; @@ -4477,10 +4477,10 @@ strict digraph { "492 1883" -> "493 1884" [label="[]", style=dashed]; "493 1884" -> "494 1885" [label="[]", style=dashed]; "494 1885" -> "495 1886" [label="[1]", style=dashed]; -"495 1886" -> "496 1887" [label="[1]", style=dashed]; -"495 1886" -> "671 2076" [label="[1]", style=solid]; -"496 1887" -> "511 1888" [label="[1]", style=dashed]; -"496 1887" -> "572 1897" [label="[1]", style=dashed]; +"495 1886" -> "496 1887" [label="[]", style=dashed]; +"495 1886" -> "671 2076" [label="[]", style=solid]; +"496 1887" -> "511 1888" [label="[]", style=dashed]; +"496 1887" -> "572 1897" [label="[]", style=dashed]; "497 1873" -> "500 1874" [label="[1]", style=dashed]; "498 1872" -> "500 1874" [label="[1]", style=dashed]; "499 1871" -> "500 1874" [label="[1]", style=dashed]; @@ -4491,16 +4491,16 @@ strict digraph { "504 1862" -> "506 1866" [label="[1]", style=dashed]; "505 1861" -> "506 1866" [label="[1]", style=dashed]; "506 1866" -> "508 1867" [label="[5]", style=dashed]; -"507 509" -> "508 1867" [label="[1]", style=solid]; -"508 1867" -> "509 1868" [label="[1]", style=solid]; -"509 1868" -> "510 1875" [label="[1]", 
style=solid]; -"510 1875" -> "511 1888" [label="[1]", style=solid]; -"511 1888" -> "512 1899" [label="[1]", style=solid]; -"512 1899" -> "513 2064" [label="[1]", style=solid]; -"512 1899" -> "583 1991" [label="[1]", style=solid]; -"512 1899" -> "597 1971" [label="[1]", style=solid]; -"512 1899" -> "612 1981" [label="[1]", style=solid]; -"512 1899" -> "626 1961" [label="[1]", style=solid]; +"507 509" -> "508 1867" [label="[1, 12, -1, -1]", style=solid]; +"508 1867" -> "509 1868" [label="[]", style=solid]; +"509 1868" -> "510 1875" [label="[]", style=solid]; +"510 1875" -> "511 1888" [label="[]", style=solid]; +"511 1888" -> "512 1899" [label="[]", style=solid]; +"512 1899" -> "513 2064" [label="[-1, 4]", style=solid]; +"512 1899" -> "583 1991" [label="[-1, 4]", style=solid]; +"512 1899" -> "597 1971" [label="[-1, 4]", style=solid]; +"512 1899" -> "612 1981" [label="[-1, 4]", style=solid]; +"512 1899" -> "626 1961" [label="[-1, 4]", style=solid]; "513 2064" -> "640 2065" [label="[2]", style=dashed]; "514 1894" -> "517 1895" [label="[1]", style=dashed]; "515 1893" -> "517 1895" [label="[1]", style=dashed]; @@ -4525,19 +4525,19 @@ strict digraph { "528 745" -> "529 746" [label="[]", style=dashed]; "529 746" -> "530 747" [label="[1]", style=dashed]; "530 747" -> "531 748" [label="[1]", style=dashed]; -"531 748" -> "532 749" [label="[1]", style=solid]; -"532 749" -> "533 750" [label="[1]", style=solid]; -"533 750" -> "534 751" [label="[1]", style=dashed]; -"534 751" -> "535 756" [label="[1]", style=dashed]; -"535 756" -> "536 757" [label="[1]", style=dashed]; -"536 757" -> "537 759" [label="[1]", style=solid]; -"537 759" -> "538 760" [label="[1]", style=solid]; -"538 760" -> "539 762" [label="[1]", style=solid]; -"539 762" -> "540 766" [label="[1]", style=solid]; -"540 766" -> "541 776" [label="[1]", style=solid]; -"541 776" -> "542 780" [label="[1]", style=solid]; -"541 776" -> "564 778" [label="[1]", style=solid]; -"542 780" -> "566 781" [label="[1]", style=solid]; +"531 748" -> "532 749" [label="[-1]", style=solid]; +"532 749" -> "533 750" [label="[-1]", style=solid]; +"533 750" -> "534 751" [label="[1, -1]", style=dashed]; +"534 751" -> "535 756" [label="[-1, 1]", style=dashed]; +"535 756" -> "536 757" [label="[]", style=dashed]; +"536 757" -> "537 759" [label="[]", style=solid]; +"537 759" -> "538 760" [label="[]", style=solid]; +"538 760" -> "539 762" [label="[]", style=solid]; +"539 762" -> "540 766" [label="[-1, 1]", style=solid]; +"540 766" -> "541 776" [label="[-1, -1]", style=solid]; +"541 776" -> "542 780" [label="[-1]", style=solid]; +"541 776" -> "564 778" [label="[-1]", style=solid]; +"542 780" -> "566 781" [label="[-1, 1]", style=solid]; "543 770" -> "545 771" [label="[1]", style=dashed]; "544 769" -> "545 771" [label="[1]", style=dashed]; "545 771" -> "561 772" [label="[2]", style=dashed]; @@ -4547,163 +4547,163 @@ strict digraph { "549 728" -> "550 729" [label="[]", style=dashed]; "550 729" -> "551 730" [label="[1]", style=dashed]; "551 730" -> "552 731" [label="[1]", style=dashed]; -"552 731" -> "553 732" [label="[1]", style=solid]; -"553 732" -> "554 733" [label="[1]", style=solid]; -"554 733" -> "555 734" [label="[1]", style=dashed]; -"555 734" -> "556 739" [label="[1]", style=dashed]; -"556 739" -> "557 740" [label="[1]", style=dashed]; -"557 740" -> "558 742" [label="[1]", style=solid]; -"558 742" -> "559 743" [label="[1]", style=solid]; -"559 743" -> "560 768" [label="[1]", style=solid]; -"560 768" -> "561 772" [label="[1]", style=solid]; -"561 772" -> "562 774" 
[label="[1]", style=solid]; -"562 774" -> "563 779" [label="[1]", style=solid]; -"562 774" -> "565 777" [label="[1]", style=solid]; -"563 779" -> "566 781" [label="[1]", style=solid]; -"564 778" -> "566 781" [label="[1]", style=solid]; -"565 777" -> "566 781" [label="[1]", style=solid]; -"566 781" -> "567 783" [label="[1]", style=solid]; -"567 783" -> "568 785" [label="[1]", style=solid]; -"568 785" -> "569 787" [label="[1]", style=solid]; -"569 787" -> "570 1889" [label="[1]", style=solid]; -"570 1889" -> "571 1896" [label="[1]", style=solid]; -"571 1896" -> "572 1897" [label="[1]", style=solid]; -"572 1897" -> "573 1901" [label="[1]", style=solid]; -"573 1901" -> "574 1902" [label="[1]", style=solid]; -"574 1902" -> "575 1931" [label="[1]", style=solid]; -"574 1902" -> "577 1924" [label="[1]", style=solid]; -"574 1902" -> "590 1951" [label="[1]", style=solid]; -"574 1902" -> "604 1914" [label="[1]", style=solid]; -"574 1902" -> "606 1907" [label="[1]", style=solid]; -"574 1902" -> "619 1941" [label="[1]", style=solid]; -"575 1931" -> "576 1933" [label="[1]", style=solid]; -"576 1933" -> "579 1934" [label="[1]", style=solid]; -"577 1924" -> "578 1926" [label="[1]", style=solid]; -"578 1926" -> "579 1934" [label="[1]", style=solid]; -"579 1934" -> "580 1936" [label="[1]", style=solid]; -"580 1936" -> "581 2040" [label="[1]", style=solid]; -"580 1936" -> "589 1955" [label="[1]", style=solid]; -"580 1936" -> "595 2017" [label="[1]", style=solid]; -"581 2040" -> "582 2041" [label="[1]", style=solid]; -"582 2041" -> "587 2042" [label="[1]", style=solid]; -"583 1991" -> "584 1996" [label="[1]", style=solid]; -"584 1996" -> "585 1998" [label="[1]", style=solid]; -"585 1998" -> "586 2035" [label="[1]", style=solid]; -"586 2035" -> "587 2042" [label="[1]", style=solid]; -"587 2042" -> "588 2055" [label="[1]", style=solid]; -"587 2042" -> "633 2047" [label="[1]", style=solid]; -"588 2055" -> "601 2056" [label="[1]", style=solid]; -"589 1955" -> "592 1956" [label="[1]", style=solid]; -"590 1951" -> "591 1953" [label="[1]", style=solid]; -"591 1953" -> "592 1956" [label="[1]", style=solid]; -"592 1956" -> "593 2024" [label="[1]", style=solid]; -"593 2024" -> "594 2025" [label="[1]", style=solid]; -"594 2025" -> "600 2026" [label="[1]", style=solid]; -"595 2017" -> "596 2018" [label="[1]", style=solid]; -"596 2018" -> "599 2019" [label="[1]", style=solid]; -"597 1971" -> "598 1976" [label="[1]", style=solid]; -"598 1976" -> "599 2019" [label="[1]", style=solid]; -"599 2019" -> "600 2026" [label="[1]", style=solid]; -"600 2026" -> "601 2056" [label="[1]", style=solid]; -"600 2026" -> "634 2048" [label="[1]", style=solid]; -"601 2056" -> "602 2058" [label="[1]", style=solid]; -"602 2058" -> "603 2062" [label="[1]", style=solid]; -"603 2062" -> "639 2063" [label="[1]", style=solid]; -"604 1914" -> "605 1916" [label="[1]", style=solid]; -"605 1916" -> "608 1917" [label="[1]", style=solid]; -"606 1907" -> "607 1909" [label="[1]", style=solid]; -"607 1909" -> "608 1917" [label="[1]", style=solid]; -"608 1917" -> "609 1919" [label="[1]", style=solid]; -"609 1919" -> "610 2032" [label="[1]", style=solid]; -"609 1919" -> "618 1945" [label="[1]", style=solid]; -"609 1919" -> "624 2003" [label="[1]", style=solid]; -"610 2032" -> "611 2033" [label="[1]", style=solid]; -"611 2033" -> "616 2034" [label="[1]", style=solid]; -"612 1981" -> "613 1986" [label="[1]", style=solid]; -"613 1986" -> "614 1997" [label="[1]", style=solid]; -"614 1997" -> "615 2027" [label="[1]", style=solid]; -"615 2027" -> "616 2034" 
[label="[1]", style=solid]; -"616 2034" -> "617 2050" [label="[1]", style=solid]; -"616 2034" -> "636 2044" [label="[1]", style=solid]; -"617 2050" -> "630 2051" [label="[1]", style=solid]; -"618 1945" -> "621 1946" [label="[1]", style=solid]; -"619 1941" -> "620 1943" [label="[1]", style=solid]; -"620 1943" -> "621 1946" [label="[1]", style=solid]; -"621 1946" -> "622 2010" [label="[1]", style=solid]; -"622 2010" -> "623 2011" [label="[1]", style=solid]; -"623 2011" -> "629 2012" [label="[1]", style=solid]; -"624 2003" -> "625 2004" [label="[1]", style=solid]; -"625 2004" -> "628 2005" [label="[1]", style=solid]; -"626 1961" -> "627 1966" [label="[1]", style=solid]; -"627 1966" -> "628 2005" [label="[1]", style=solid]; -"628 2005" -> "629 2012" [label="[1]", style=solid]; -"629 2012" -> "630 2051" [label="[1]", style=solid]; -"629 2012" -> "637 2045" [label="[1]", style=solid]; -"630 2051" -> "631 2053" [label="[1]", style=solid]; -"631 2053" -> "632 2061" [label="[1]", style=solid]; -"632 2061" -> "639 2063" [label="[1]", style=solid]; -"633 2047" -> "634 2048" [label="[1]", style=solid]; -"634 2048" -> "635 2060" [label="[1]", style=solid]; -"635 2060" -> "639 2063" [label="[1]", style=solid]; -"636 2044" -> "637 2045" [label="[1]", style=solid]; -"637 2045" -> "638 2059" [label="[1]", style=solid]; -"638 2059" -> "639 2063" [label="[1]", style=solid]; -"639 2063" -> "640 2065" [label="[1]", style=solid]; -"640 2065" -> "641 2072" [label="[1]", style=solid]; -"641 2072" -> "642 2074" [label="[1]", style=solid]; -"642 2074" -> "643 2092" [label="[1]", style=solid]; -"642 2074" -> "647 2081" [label="[1]", style=solid]; -"643 2092" -> "644 2097" [label="[1]", style=solid]; -"644 2097" -> "645 2098" [label="[1]", style=solid]; -"645 2098" -> "646 2100" [label="[1]", style=solid]; -"646 2100" -> "651 2101" [label="[1]", style=solid]; -"647 2081" -> "648 2086" [label="[1]", style=solid]; -"648 2086" -> "649 2087" [label="[1]", style=solid]; -"649 2087" -> "650 2099" [label="[1]", style=solid]; -"650 2099" -> "651 2101" [label="[1]", style=solid]; -"651 2101" -> "652 2103" [label="[1]", style=solid]; -"652 2103" -> "653 2104" [label="[1]", style=solid]; -"652 2103" -> "676 2132" [label="[1]", style=solid]; -"653 2104" -> "654 2108" [label="[1]", style=solid]; -"653 2104" -> "656 2105" [label="[1]", style=solid]; -"653 2104" -> "658 2111" [label="[1]", style=solid]; -"654 2108" -> "655 2110" [label="[1]", style=solid]; -"655 2110" -> "658 2111" [label="[1]", style=solid]; -"656 2105" -> "657 2107" [label="[1]", style=solid]; -"657 2107" -> "658 2111" [label="[1]", style=solid]; -"658 2111" -> "659 2112" [label="[1]", style=solid]; -"659 2112" -> "660 2114" [label="[1]", style=solid]; -"659 2112" -> "663 2113" [label="[1]", style=solid]; -"660 2114" -> "661 2120" [label="[1]", style=solid]; -"661 2120" -> "662 2121" [label="[1]", style=dashed]; -"662 2121" -> "666 2125" [label="[1]", style=dashed]; -"663 2113" -> "664 2116" [label="[1]", style=solid]; -"664 2116" -> "665 2117" [label="[1]", style=dashed]; -"665 2117" -> "666 2125" [label="[1]", style=dashed]; -"666 2125" -> "667 2127" [label="[1]", style=dashed]; -"667 2127" -> "668 2128" [label="[1]", style=solid]; -"668 2128" -> "669 2129" [label="[1]", style=dashed]; -"669 2129" -> "670 2130" [label="[1]", style=dashed]; -"670 2130" -> "672 2134" [label="[1]", style=dashed]; -"670 2130" -> "675 2131" [label="[1]", style=dashed]; -"671 2076" -> "672 2134" [label="[1]", style=solid]; -"672 2134" -> "673 2136" [label="[1]", style=solid]; -"672 
2134" -> "682 2152" [label="[1]", style=solid]; -"673 2136" -> "674 2137" [label="[1]", style=solid]; -"674 2137" -> "678 2140" [label="[1]", style=solid]; -"675 2131" -> "676 2132" [label="[1]", style=dashed]; -"676 2132" -> "677 2135" [label="[1]", style=solid]; -"676 2132" -> "1433 2150" [label="[1]", style=solid]; -"677 2135" -> "678 2140" [label="[1]", style=solid]; -"678 2140" -> "679 2142" [label="[1]", style=dashed]; -"679 2142" -> "680 2143" [label="[1]", style=dashed]; -"680 2143" -> "681 2148" [label="[1]", style=dashed]; -"681 2148" -> "682 2152" [label="[1]", style=dashed]; -"681 2148" -> "1432 2149" [label="[1]", style=dashed]; -"682 2152" -> "1421 2480" [label="[1]", style=solid]; +"552 731" -> "553 732" [label="[-1]", style=solid]; +"553 732" -> "554 733" [label="[-1]", style=solid]; +"554 733" -> "555 734" [label="[1, -1]", style=dashed]; +"555 734" -> "556 739" [label="[-1, 1]", style=dashed]; +"556 739" -> "557 740" [label="[]", style=dashed]; +"557 740" -> "558 742" [label="[]", style=solid]; +"558 742" -> "559 743" [label="[]", style=solid]; +"559 743" -> "560 768" [label="[]", style=solid]; +"560 768" -> "561 772" [label="[1, -1]", style=solid]; +"561 772" -> "562 774" [label="[-1, -1]", style=solid]; +"562 774" -> "563 779" [label="[-1]", style=solid]; +"562 774" -> "565 777" [label="[-1]", style=solid]; +"563 779" -> "566 781" [label="[-1, 1]", style=solid]; +"564 778" -> "566 781" [label="[-1, 1]", style=solid]; +"565 777" -> "566 781" [label="[-1, 1]", style=solid]; +"566 781" -> "567 783" [label="[-1, 4]", style=solid]; +"567 783" -> "568 785" [label="[-1, 1, 4]", style=solid]; +"568 785" -> "569 787" [label="[-1, 3, 4]", style=solid]; +"569 787" -> "570 1889" [label="[-1, 4]", style=solid]; +"570 1889" -> "571 1896" [label="[-1, 4]", style=solid]; +"571 1896" -> "572 1897" [label="[]", style=solid]; +"572 1897" -> "573 1901" [label="[]", style=solid]; +"573 1901" -> "574 1902" [label="[-1, 4]", style=solid]; +"574 1902" -> "575 1931" [label="[-1, 4]", style=solid]; +"574 1902" -> "577 1924" [label="[-1, 4]", style=solid]; +"574 1902" -> "590 1951" [label="[-1, 4]", style=solid]; +"574 1902" -> "604 1914" [label="[-1, 4]", style=solid]; +"574 1902" -> "606 1907" [label="[-1, 4]", style=solid]; +"574 1902" -> "619 1941" [label="[-1, 4]", style=solid]; +"575 1931" -> "576 1933" [label="[-1, 4]", style=solid]; +"576 1933" -> "579 1934" [label="[-1]", style=solid]; +"577 1924" -> "578 1926" [label="[-1, 4]", style=solid]; +"578 1926" -> "579 1934" [label="[-1]", style=solid]; +"579 1934" -> "580 1936" [label="[-1]", style=solid]; +"580 1936" -> "581 2040" [label="[-1]", style=solid]; +"580 1936" -> "589 1955" [label="[-1]", style=solid]; +"580 1936" -> "595 2017" [label="[-1]", style=solid]; +"581 2040" -> "582 2041" [label="[-1]", style=solid]; +"582 2041" -> "587 2042" [label="[-1, 1]", style=solid]; +"583 1991" -> "584 1996" [label="[-1, 4]", style=solid]; +"584 1996" -> "585 1998" [label="[-1, 1]", style=solid]; +"585 1998" -> "586 2035" [label="[-1, 1]", style=solid]; +"586 2035" -> "587 2042" [label="[-1, 1]", style=solid]; +"587 2042" -> "588 2055" [label="[-1, 1]", style=solid]; +"587 2042" -> "633 2047" [label="[-1, 1]", style=solid]; +"588 2055" -> "601 2056" [label="[-1, 1]", style=solid]; +"589 1955" -> "592 1956" [label="[-1]", style=solid]; +"590 1951" -> "591 1953" [label="[-1, 4]", style=solid]; +"591 1953" -> "592 1956" [label="[-1]", style=solid]; +"592 1956" -> "593 2024" [label="[-1]", style=solid]; +"593 2024" -> "594 2025" [label="[-1]", 
style=solid]; +"594 2025" -> "600 2026" [label="[-1, 1]", style=solid]; +"595 2017" -> "596 2018" [label="[-1]", style=solid]; +"596 2018" -> "599 2019" [label="[-1, 1]", style=solid]; +"597 1971" -> "598 1976" [label="[-1, 4]", style=solid]; +"598 1976" -> "599 2019" [label="[-1, 1]", style=solid]; +"599 2019" -> "600 2026" [label="[-1, 1]", style=solid]; +"600 2026" -> "601 2056" [label="[-1, 1]", style=solid]; +"600 2026" -> "634 2048" [label="[-1, 1]", style=solid]; +"601 2056" -> "602 2058" [label="[-1, 1]", style=solid]; +"602 2058" -> "603 2062" [label="[-1, 1]", style=solid]; +"603 2062" -> "639 2063" [label="[-1, 1, 1]", style=solid]; +"604 1914" -> "605 1916" [label="[-1, 4]", style=solid]; +"605 1916" -> "608 1917" [label="[-1]", style=solid]; +"606 1907" -> "607 1909" [label="[-1, 4]", style=solid]; +"607 1909" -> "608 1917" [label="[-1]", style=solid]; +"608 1917" -> "609 1919" [label="[-1]", style=solid]; +"609 1919" -> "610 2032" [label="[-1]", style=solid]; +"609 1919" -> "618 1945" [label="[-1]", style=solid]; +"609 1919" -> "624 2003" [label="[-1]", style=solid]; +"610 2032" -> "611 2033" [label="[-1]", style=solid]; +"611 2033" -> "616 2034" [label="[-1, 1]", style=solid]; +"612 1981" -> "613 1986" [label="[-1, 4]", style=solid]; +"613 1986" -> "614 1997" [label="[-1, 1]", style=solid]; +"614 1997" -> "615 2027" [label="[-1, 1]", style=solid]; +"615 2027" -> "616 2034" [label="[-1, 1]", style=solid]; +"616 2034" -> "617 2050" [label="[-1, 1]", style=solid]; +"616 2034" -> "636 2044" [label="[-1, 1]", style=solid]; +"617 2050" -> "630 2051" [label="[-1, 1]", style=solid]; +"618 1945" -> "621 1946" [label="[-1]", style=solid]; +"619 1941" -> "620 1943" [label="[-1, 4]", style=solid]; +"620 1943" -> "621 1946" [label="[-1]", style=solid]; +"621 1946" -> "622 2010" [label="[-1]", style=solid]; +"622 2010" -> "623 2011" [label="[-1]", style=solid]; +"623 2011" -> "629 2012" [label="[-1, 1]", style=solid]; +"624 2003" -> "625 2004" [label="[-1]", style=solid]; +"625 2004" -> "628 2005" [label="[-1, 1]", style=solid]; +"626 1961" -> "627 1966" [label="[-1, 4]", style=solid]; +"627 1966" -> "628 2005" [label="[-1, 1]", style=solid]; +"628 2005" -> "629 2012" [label="[-1, 1]", style=solid]; +"629 2012" -> "630 2051" [label="[-1, 1]", style=solid]; +"629 2012" -> "637 2045" [label="[-1, 1]", style=solid]; +"630 2051" -> "631 2053" [label="[-1, 1]", style=solid]; +"631 2053" -> "632 2061" [label="[-1, 1]", style=solid]; +"632 2061" -> "639 2063" [label="[-1, 1, 1]", style=solid]; +"633 2047" -> "634 2048" [label="[-1, 1]", style=solid]; +"634 2048" -> "635 2060" [label="[-1, 1]", style=solid]; +"635 2060" -> "639 2063" [label="[-1, 1, 1]", style=solid]; +"636 2044" -> "637 2045" [label="[-1, 1]", style=solid]; +"637 2045" -> "638 2059" [label="[-1, 1]", style=solid]; +"638 2059" -> "639 2063" [label="[-1, 1, 1]", style=solid]; +"639 2063" -> "640 2065" [label="[-1, 1, 4]", style=solid]; +"640 2065" -> "641 2072" [label="[]", style=solid]; +"641 2072" -> "642 2074" [label="[]", style=solid]; +"642 2074" -> "643 2092" [label="[]", style=solid]; +"642 2074" -> "647 2081" [label="[]", style=solid]; +"643 2092" -> "644 2097" [label="[]", style=solid]; +"644 2097" -> "645 2098" [label="[]", style=solid]; +"645 2098" -> "646 2100" [label="[]", style=solid]; +"646 2100" -> "651 2101" [label="[]", style=solid]; +"647 2081" -> "648 2086" [label="[]", style=solid]; +"648 2086" -> "649 2087" [label="[]", style=solid]; +"649 2087" -> "650 2099" [label="[]", style=solid]; +"650 2099" -> "651 
2101" [label="[]", style=solid]; +"651 2101" -> "652 2103" [label="[]", style=solid]; +"652 2103" -> "653 2104" [label="[-1, 4]", style=solid]; +"652 2103" -> "676 2132" [label="[-1, 4]", style=solid]; +"653 2104" -> "654 2108" [label="[-1, 1]", style=solid]; +"653 2104" -> "656 2105" [label="[-1, 1]", style=solid]; +"653 2104" -> "658 2111" [label="[-1, 1]", style=solid]; +"654 2108" -> "655 2110" [label="[-1, 1]", style=solid]; +"655 2110" -> "658 2111" [label="[-1, 1]", style=solid]; +"656 2105" -> "657 2107" [label="[-1, 1]", style=solid]; +"657 2107" -> "658 2111" [label="[-1, 1]", style=solid]; +"658 2111" -> "659 2112" [label="[-1, 4]", style=solid]; +"659 2112" -> "660 2114" [label="[-1, 1]", style=solid]; +"659 2112" -> "663 2113" [label="[-1, 1]", style=solid]; +"660 2114" -> "661 2120" [label="[-1]", style=solid]; +"661 2120" -> "662 2121" [label="[-1]", style=dashed]; +"662 2121" -> "666 2125" [label="[-1]", style=dashed]; +"663 2113" -> "664 2116" [label="[-1]", style=solid]; +"664 2116" -> "665 2117" [label="[-1]", style=dashed]; +"665 2117" -> "666 2125" [label="[-1]", style=dashed]; +"666 2125" -> "667 2127" [label="[-1]", style=dashed]; +"667 2127" -> "668 2128" [label="[-1]", style=solid]; +"668 2128" -> "669 2129" [label="[1, -1]", style=dashed]; +"669 2129" -> "670 2130" [label="[-1, 1]", style=dashed]; +"670 2130" -> "672 2134" [label="[-1]", style=dashed]; +"670 2130" -> "675 2131" [label="[-1]", style=dashed]; +"671 2076" -> "672 2134" [label="[]", style=solid]; +"672 2134" -> "673 2136" [label="[]", style=solid]; +"672 2134" -> "682 2152" [label="[]", style=solid]; +"673 2136" -> "674 2137" [label="[]", style=solid]; +"674 2137" -> "678 2140" [label="[]", style=solid]; +"675 2131" -> "676 2132" [label="[-1]", style=dashed]; +"676 2132" -> "677 2135" [label="[-1, 4]", style=solid]; +"676 2132" -> "1433 2150" [label="[-1, 4]", style=solid]; +"677 2135" -> "678 2140" [label="[1, -1, 4]", style=solid]; +"678 2140" -> "679 2142" [label="[-1, 3]", style=dashed]; +"679 2142" -> "680 2143" [label="[-1, 1]", style=dashed]; +"680 2143" -> "681 2148" [label="[-1]", style=dashed]; +"681 2148" -> "682 2152" [label="[-1]", style=dashed]; +"681 2148" -> "1432 2149" [label="[-1]", style=dashed]; +"682 2152" -> "1421 2480" [label="[]", style=solid]; "683 1744" -> "710 1745" [label="[1]", style=dashed]; "684 1743" -> "710 1745" [label="[1]", style=dashed]; -"685 418" -> "702 419" [label="[1]", style=solid]; +"685 418" -> "702 419" [label="[1, 256, -1, -1]", style=solid]; "686 407" -> "687 410" [label="[4]", style=dashed]; "687 410" -> "688 411" [label="[2]", style=dashed]; "688 411" -> "699 413" [label="[2]", style=solid]; @@ -4719,24 +4719,24 @@ strict digraph { "698 406" -> "699 413" [label="[2]", style=solid]; "699 413" -> "700 414" [label="[2]", style=solid]; "700 414" -> "701 415" [label="[4]", style=solid]; -"701 415" -> "702 419" [label="[1]", style=solid]; -"702 419" -> "703 422" [label="[1]", style=solid]; -"702 419" -> "932 439" [label="[1]", style=solid]; -"702 419" -> "935 430" [label="[1]", style=solid]; -"702 419" -> "939 424" [label="[1]", style=solid]; -"702 419" -> "947 447" [label="[1]", style=solid]; -"703 422" -> "704 502" [label="[1]", style=solid]; -"703 422" -> "764 530" [label="[1]", style=solid]; -"703 422" -> "767 527" [label="[1]", style=solid]; -"703 422" -> "1563 2603" [label="[1]", style=solid]; -"703 422" -> "3468 6647" [label="[1]", style=solid]; -"704 502" -> "705 503" [label="[1]", style=solid]; -"705 503" -> "706 504" [label="[1]", style=solid]; 
-"705 503" -> "753 505" [label="[1]", style=solid]; -"706 504" -> "707 1502" [label="[1]", style=solid]; -"706 504" -> "718 1508" [label="[1]", style=solid]; -"706 504" -> "721 1505" [label="[1]", style=solid]; -"706 504" -> "728 1518" [label="[1]", style=solid]; +"701 415" -> "702 419" [label="[-1, -1, -1, -1]", style=solid]; +"702 419" -> "703 422" [label="[-1, 256, -1, -1]", style=solid]; +"702 419" -> "932 439" [label="[-1, 256, -1, -1]", style=solid]; +"702 419" -> "935 430" [label="[-1, 256, -1, -1]", style=solid]; +"702 419" -> "939 424" [label="[-1, 256, -1, -1]", style=solid]; +"702 419" -> "947 447" [label="[-1, 256, -1, -1]", style=solid]; +"703 422" -> "704 502" [label="[-1, 256, -1, -1]", style=solid]; +"703 422" -> "764 530" [label="[-1, 256, -1, -1]", style=solid]; +"703 422" -> "767 527" [label="[-1, 256, -1, -1]", style=solid]; +"703 422" -> "1563 2603" [label="[-1, 256, -1, -1]", style=solid]; +"703 422" -> "3468 6647" [label="[-1, 256, -1, -1]", style=solid]; +"704 502" -> "705 503" [label="[-1, 256, -1, -1]", style=solid]; +"705 503" -> "706 504" [label="[-1, 256, -1, -1]", style=solid]; +"705 503" -> "753 505" [label="[-1, 256, -1, -1]", style=solid]; +"706 504" -> "707 1502" [label="[-1, 3, -1, -1]", style=solid]; +"706 504" -> "718 1508" [label="[-1, 3, -1, -1]", style=solid]; +"706 504" -> "721 1505" [label="[-1, 3, -1, -1]", style=solid]; +"706 504" -> "728 1518" [label="[-1, 3, -1, -1]", style=solid]; "707 1502" -> "708 1503" [label="[4]", style=dashed]; "708 1503" -> "709 1742" [label="[]", style=dashed]; "708 1503" -> "712 1528" [label="[]", style=dashed]; @@ -4766,13 +4766,13 @@ strict digraph { "725 1513" -> "727 1517" [label="[1]", style=dashed]; "726 1512" -> "727 1517" [label="[1]", style=dashed]; "727 1517" -> "728 1518" [label="[5]", style=dashed]; -"728 1518" -> "729 1519" [label="[1]", style=solid]; -"729 1519" -> "730 1526" [label="[1]", style=solid]; -"730 1526" -> "731 1531" [label="[1]", style=solid]; -"731 1531" -> "732 1532" [label="[1]", style=solid]; -"732 1532" -> "733 1550" [label="[1]", style=solid]; -"732 1532" -> "741 1560" [label="[1]", style=solid]; -"733 1550" -> "734 1552" [label="[1]", style=dashed]; +"728 1518" -> "729 1519" [label="[]", style=solid]; +"729 1519" -> "730 1526" [label="[]", style=solid]; +"730 1526" -> "731 1531" [label="[]", style=solid]; +"731 1531" -> "732 1532" [label="[]", style=solid]; +"732 1532" -> "733 1550" [label="[]", style=solid]; +"732 1532" -> "741 1560" [label="[]", style=solid]; +"733 1550" -> "734 1552" [label="[-1]", style=dashed]; "734 1552" -> "735 1553" [label="[]", style=dashed]; "735 1553" -> "736 1555" [label="[1]", style=dashed]; "736 1555" -> "737 1556" [label="[2]", style=dashed]; @@ -4780,10 +4780,10 @@ strict digraph { "738 1557" -> "739 1558" [label="[]", style=dashed]; "739 1558" -> "740 1559" [label="[]", style=dashed]; "740 1559" -> "741 1560" [label="[1]", style=dashed]; -"741 1560" -> "742 1561" [label="[1]", style=dashed]; -"741 1560" -> "917 1750" [label="[1]", style=solid]; -"742 1561" -> "757 1562" [label="[1]", style=dashed]; -"742 1561" -> "818 1571" [label="[1]", style=dashed]; +"741 1560" -> "742 1561" [label="[]", style=dashed]; +"741 1560" -> "917 1750" [label="[]", style=solid]; +"742 1561" -> "757 1562" [label="[]", style=dashed]; +"742 1561" -> "818 1571" [label="[]", style=dashed]; "743 1547" -> "746 1548" [label="[1]", style=dashed]; "744 1546" -> "746 1548" [label="[1]", style=dashed]; "745 1545" -> "746 1548" [label="[1]", style=dashed]; @@ -4794,16 +4794,16 @@ 
strict digraph { "750 1536" -> "752 1540" [label="[1]", style=dashed]; "751 1535" -> "752 1540" [label="[1]", style=dashed]; "752 1540" -> "754 1541" [label="[5]", style=dashed]; -"753 505" -> "754 1541" [label="[1]", style=solid]; -"754 1541" -> "755 1542" [label="[1]", style=solid]; -"755 1542" -> "756 1549" [label="[1]", style=solid]; -"756 1549" -> "757 1562" [label="[1]", style=solid]; -"757 1562" -> "758 1573" [label="[1]", style=solid]; -"758 1573" -> "759 1738" [label="[1]", style=solid]; -"758 1573" -> "829 1665" [label="[1]", style=solid]; -"758 1573" -> "843 1645" [label="[1]", style=solid]; -"758 1573" -> "858 1655" [label="[1]", style=solid]; -"758 1573" -> "872 1635" [label="[1]", style=solid]; +"753 505" -> "754 1541" [label="[-1, 12, -1, -1]", style=solid]; +"754 1541" -> "755 1542" [label="[]", style=solid]; +"755 1542" -> "756 1549" [label="[]", style=solid]; +"756 1549" -> "757 1562" [label="[]", style=solid]; +"757 1562" -> "758 1573" [label="[]", style=solid]; +"758 1573" -> "759 1738" [label="[-1, 4]", style=solid]; +"758 1573" -> "829 1665" [label="[-1, 4]", style=solid]; +"758 1573" -> "843 1645" [label="[-1, 4]", style=solid]; +"758 1573" -> "858 1655" [label="[-1, 4]", style=solid]; +"758 1573" -> "872 1635" [label="[-1, 4]", style=solid]; "759 1738" -> "886 1739" [label="[2]", style=dashed]; "760 1568" -> "763 1569" [label="[1]", style=dashed]; "761 1567" -> "763 1569" [label="[1]", style=dashed]; @@ -4828,19 +4828,19 @@ strict digraph { "774 684" -> "775 685" [label="[]", style=dashed]; "775 685" -> "776 686" [label="[1]", style=dashed]; "776 686" -> "777 687" [label="[1]", style=dashed]; -"777 687" -> "778 688" [label="[1]", style=solid]; -"778 688" -> "779 689" [label="[1]", style=solid]; -"779 689" -> "780 690" [label="[1]", style=dashed]; -"780 690" -> "781 695" [label="[1]", style=dashed]; -"781 695" -> "782 696" [label="[1]", style=dashed]; -"782 696" -> "783 698" [label="[1]", style=solid]; -"783 698" -> "784 699" [label="[1]", style=solid]; -"784 699" -> "785 701" [label="[1]", style=solid]; -"785 701" -> "786 705" [label="[1]", style=solid]; -"786 705" -> "787 715" [label="[1]", style=solid]; -"787 715" -> "788 719" [label="[1]", style=solid]; -"787 715" -> "810 717" [label="[1]", style=solid]; -"788 719" -> "812 720" [label="[1]", style=solid]; +"777 687" -> "778 688" [label="[-1]", style=solid]; +"778 688" -> "779 689" [label="[-1]", style=solid]; +"779 689" -> "780 690" [label="[1, -1]", style=dashed]; +"780 690" -> "781 695" [label="[-1, 1]", style=dashed]; +"781 695" -> "782 696" [label="[]", style=dashed]; +"782 696" -> "783 698" [label="[]", style=solid]; +"783 698" -> "784 699" [label="[]", style=solid]; +"784 699" -> "785 701" [label="[]", style=solid]; +"785 701" -> "786 705" [label="[-1, 1]", style=solid]; +"786 705" -> "787 715" [label="[-1, -1]", style=solid]; +"787 715" -> "788 719" [label="[-1]", style=solid]; +"787 715" -> "810 717" [label="[-1]", style=solid]; +"788 719" -> "812 720" [label="[-1, 1]", style=solid]; "789 709" -> "791 710" [label="[1]", style=dashed]; "790 708" -> "791 710" [label="[1]", style=dashed]; "791 710" -> "807 711" [label="[2]", style=dashed]; @@ -4850,163 +4850,163 @@ strict digraph { "795 667" -> "796 668" [label="[]", style=dashed]; "796 668" -> "797 669" [label="[1]", style=dashed]; "797 669" -> "798 670" [label="[1]", style=dashed]; -"798 670" -> "799 671" [label="[1]", style=solid]; -"799 671" -> "800 672" [label="[1]", style=solid]; -"800 672" -> "801 673" [label="[1]", style=dashed]; -"801 673" -> "802 
678" [label="[1]", style=dashed]; -"802 678" -> "803 679" [label="[1]", style=dashed]; -"803 679" -> "804 681" [label="[1]", style=solid]; -"804 681" -> "805 682" [label="[1]", style=solid]; -"805 682" -> "806 707" [label="[1]", style=solid]; -"806 707" -> "807 711" [label="[1]", style=solid]; -"807 711" -> "808 713" [label="[1]", style=solid]; -"808 713" -> "809 718" [label="[1]", style=solid]; -"808 713" -> "811 716" [label="[1]", style=solid]; -"809 718" -> "812 720" [label="[1]", style=solid]; -"810 717" -> "812 720" [label="[1]", style=solid]; -"811 716" -> "812 720" [label="[1]", style=solid]; -"812 720" -> "813 722" [label="[1]", style=solid]; -"813 722" -> "814 724" [label="[1]", style=solid]; -"814 724" -> "815 726" [label="[1]", style=solid]; -"815 726" -> "816 1563" [label="[1]", style=solid]; -"816 1563" -> "817 1570" [label="[1]", style=solid]; -"817 1570" -> "818 1571" [label="[1]", style=solid]; -"818 1571" -> "819 1575" [label="[1]", style=solid]; -"819 1575" -> "820 1576" [label="[1]", style=solid]; -"820 1576" -> "821 1605" [label="[1]", style=solid]; -"820 1576" -> "823 1598" [label="[1]", style=solid]; -"820 1576" -> "836 1625" [label="[1]", style=solid]; -"820 1576" -> "850 1588" [label="[1]", style=solid]; -"820 1576" -> "852 1581" [label="[1]", style=solid]; -"820 1576" -> "865 1615" [label="[1]", style=solid]; -"821 1605" -> "822 1607" [label="[1]", style=solid]; -"822 1607" -> "825 1608" [label="[1]", style=solid]; -"823 1598" -> "824 1600" [label="[1]", style=solid]; -"824 1600" -> "825 1608" [label="[1]", style=solid]; -"825 1608" -> "826 1610" [label="[1]", style=solid]; -"826 1610" -> "827 1714" [label="[1]", style=solid]; -"826 1610" -> "835 1629" [label="[1]", style=solid]; -"826 1610" -> "841 1691" [label="[1]", style=solid]; -"827 1714" -> "828 1715" [label="[1]", style=solid]; -"828 1715" -> "833 1716" [label="[1]", style=solid]; -"829 1665" -> "830 1670" [label="[1]", style=solid]; -"830 1670" -> "831 1672" [label="[1]", style=solid]; -"831 1672" -> "832 1709" [label="[1]", style=solid]; -"832 1709" -> "833 1716" [label="[1]", style=solid]; -"833 1716" -> "834 1729" [label="[1]", style=solid]; -"833 1716" -> "879 1721" [label="[1]", style=solid]; -"834 1729" -> "847 1730" [label="[1]", style=solid]; -"835 1629" -> "838 1630" [label="[1]", style=solid]; -"836 1625" -> "837 1627" [label="[1]", style=solid]; -"837 1627" -> "838 1630" [label="[1]", style=solid]; -"838 1630" -> "839 1698" [label="[1]", style=solid]; -"839 1698" -> "840 1699" [label="[1]", style=solid]; -"840 1699" -> "846 1700" [label="[1]", style=solid]; -"841 1691" -> "842 1692" [label="[1]", style=solid]; -"842 1692" -> "845 1693" [label="[1]", style=solid]; -"843 1645" -> "844 1650" [label="[1]", style=solid]; -"844 1650" -> "845 1693" [label="[1]", style=solid]; -"845 1693" -> "846 1700" [label="[1]", style=solid]; -"846 1700" -> "847 1730" [label="[1]", style=solid]; -"846 1700" -> "880 1722" [label="[1]", style=solid]; -"847 1730" -> "848 1732" [label="[1]", style=solid]; -"848 1732" -> "849 1736" [label="[1]", style=solid]; -"849 1736" -> "885 1737" [label="[1]", style=solid]; -"850 1588" -> "851 1590" [label="[1]", style=solid]; -"851 1590" -> "854 1591" [label="[1]", style=solid]; -"852 1581" -> "853 1583" [label="[1]", style=solid]; -"853 1583" -> "854 1591" [label="[1]", style=solid]; -"854 1591" -> "855 1593" [label="[1]", style=solid]; -"855 1593" -> "856 1706" [label="[1]", style=solid]; -"855 1593" -> "864 1619" [label="[1]", style=solid]; -"855 1593" -> "870 1677" 
[label="[1]", style=solid]; -"856 1706" -> "857 1707" [label="[1]", style=solid]; -"857 1707" -> "862 1708" [label="[1]", style=solid]; -"858 1655" -> "859 1660" [label="[1]", style=solid]; -"859 1660" -> "860 1671" [label="[1]", style=solid]; -"860 1671" -> "861 1701" [label="[1]", style=solid]; -"861 1701" -> "862 1708" [label="[1]", style=solid]; -"862 1708" -> "863 1724" [label="[1]", style=solid]; -"862 1708" -> "882 1718" [label="[1]", style=solid]; -"863 1724" -> "876 1725" [label="[1]", style=solid]; -"864 1619" -> "867 1620" [label="[1]", style=solid]; -"865 1615" -> "866 1617" [label="[1]", style=solid]; -"866 1617" -> "867 1620" [label="[1]", style=solid]; -"867 1620" -> "868 1684" [label="[1]", style=solid]; -"868 1684" -> "869 1685" [label="[1]", style=solid]; -"869 1685" -> "875 1686" [label="[1]", style=solid]; -"870 1677" -> "871 1678" [label="[1]", style=solid]; -"871 1678" -> "874 1679" [label="[1]", style=solid]; -"872 1635" -> "873 1640" [label="[1]", style=solid]; -"873 1640" -> "874 1679" [label="[1]", style=solid]; -"874 1679" -> "875 1686" [label="[1]", style=solid]; -"875 1686" -> "876 1725" [label="[1]", style=solid]; -"875 1686" -> "883 1719" [label="[1]", style=solid]; -"876 1725" -> "877 1727" [label="[1]", style=solid]; -"877 1727" -> "878 1735" [label="[1]", style=solid]; -"878 1735" -> "885 1737" [label="[1]", style=solid]; -"879 1721" -> "880 1722" [label="[1]", style=solid]; -"880 1722" -> "881 1734" [label="[1]", style=solid]; -"881 1734" -> "885 1737" [label="[1]", style=solid]; -"882 1718" -> "883 1719" [label="[1]", style=solid]; -"883 1719" -> "884 1733" [label="[1]", style=solid]; -"884 1733" -> "885 1737" [label="[1]", style=solid]; -"885 1737" -> "886 1739" [label="[1]", style=solid]; -"886 1739" -> "887 1746" [label="[1]", style=solid]; -"887 1746" -> "888 1748" [label="[1]", style=solid]; -"888 1748" -> "889 1766" [label="[1]", style=solid]; -"888 1748" -> "893 1755" [label="[1]", style=solid]; -"889 1766" -> "890 1771" [label="[1]", style=solid]; -"890 1771" -> "891 1772" [label="[1]", style=solid]; -"891 1772" -> "892 1774" [label="[1]", style=solid]; -"892 1774" -> "897 1775" [label="[1]", style=solid]; -"893 1755" -> "894 1760" [label="[1]", style=solid]; -"894 1760" -> "895 1761" [label="[1]", style=solid]; -"895 1761" -> "896 1773" [label="[1]", style=solid]; -"896 1773" -> "897 1775" [label="[1]", style=solid]; -"897 1775" -> "898 1777" [label="[1]", style=solid]; -"898 1777" -> "899 1778" [label="[1]", style=solid]; -"898 1777" -> "922 1806" [label="[1]", style=solid]; -"899 1778" -> "900 1782" [label="[1]", style=solid]; -"899 1778" -> "902 1779" [label="[1]", style=solid]; -"899 1778" -> "904 1785" [label="[1]", style=solid]; -"900 1782" -> "901 1784" [label="[1]", style=solid]; -"901 1784" -> "904 1785" [label="[1]", style=solid]; -"902 1779" -> "903 1781" [label="[1]", style=solid]; -"903 1781" -> "904 1785" [label="[1]", style=solid]; -"904 1785" -> "905 1786" [label="[1]", style=solid]; -"905 1786" -> "906 1788" [label="[1]", style=solid]; -"905 1786" -> "909 1787" [label="[1]", style=solid]; -"906 1788" -> "907 1794" [label="[1]", style=solid]; -"907 1794" -> "908 1795" [label="[1]", style=dashed]; -"908 1795" -> "912 1799" [label="[1]", style=dashed]; -"909 1787" -> "910 1790" [label="[1]", style=solid]; -"910 1790" -> "911 1791" [label="[1]", style=dashed]; -"911 1791" -> "912 1799" [label="[1]", style=dashed]; -"912 1799" -> "913 1801" [label="[1]", style=dashed]; -"913 1801" -> "914 1802" [label="[1]", style=solid]; -"914 1802" 
-> "915 1803" [label="[1]", style=dashed]; -"915 1803" -> "916 1804" [label="[1]", style=dashed]; -"916 1804" -> "918 1808" [label="[1]", style=dashed]; -"916 1804" -> "921 1805" [label="[1]", style=dashed]; -"917 1750" -> "918 1808" [label="[1]", style=solid]; -"918 1808" -> "919 1810" [label="[1]", style=solid]; -"918 1808" -> "928 1826" [label="[1]", style=solid]; -"919 1810" -> "920 1811" [label="[1]", style=solid]; -"920 1811" -> "924 1814" [label="[1]", style=solid]; -"921 1805" -> "922 1806" [label="[1]", style=dashed]; -"922 1806" -> "923 1809" [label="[1]", style=solid]; -"922 1806" -> "1435 1824" [label="[1]", style=solid]; -"923 1809" -> "924 1814" [label="[1]", style=solid]; -"924 1814" -> "925 1816" [label="[1]", style=dashed]; -"925 1816" -> "926 1817" [label="[1]", style=dashed]; -"926 1817" -> "927 1822" [label="[1]", style=dashed]; -"927 1822" -> "928 1826" [label="[1]", style=dashed]; -"927 1822" -> "1434 1823" [label="[1]", style=dashed]; -"928 1826" -> "1421 2480" [label="[1]", style=solid]; +"798 670" -> "799 671" [label="[-1]", style=solid]; +"799 671" -> "800 672" [label="[-1]", style=solid]; +"800 672" -> "801 673" [label="[1, -1]", style=dashed]; +"801 673" -> "802 678" [label="[-1, 1]", style=dashed]; +"802 678" -> "803 679" [label="[]", style=dashed]; +"803 679" -> "804 681" [label="[]", style=solid]; +"804 681" -> "805 682" [label="[]", style=solid]; +"805 682" -> "806 707" [label="[]", style=solid]; +"806 707" -> "807 711" [label="[1, -1]", style=solid]; +"807 711" -> "808 713" [label="[-1, -1]", style=solid]; +"808 713" -> "809 718" [label="[-1]", style=solid]; +"808 713" -> "811 716" [label="[-1]", style=solid]; +"809 718" -> "812 720" [label="[-1, 1]", style=solid]; +"810 717" -> "812 720" [label="[-1, 1]", style=solid]; +"811 716" -> "812 720" [label="[-1, 1]", style=solid]; +"812 720" -> "813 722" [label="[-1, 4]", style=solid]; +"813 722" -> "814 724" [label="[-1, 1, 4]", style=solid]; +"814 724" -> "815 726" [label="[-1, 3, 4]", style=solid]; +"815 726" -> "816 1563" [label="[-1, 4]", style=solid]; +"816 1563" -> "817 1570" [label="[-1, 4]", style=solid]; +"817 1570" -> "818 1571" [label="[]", style=solid]; +"818 1571" -> "819 1575" [label="[]", style=solid]; +"819 1575" -> "820 1576" [label="[-1, 4]", style=solid]; +"820 1576" -> "821 1605" [label="[-1, 4]", style=solid]; +"820 1576" -> "823 1598" [label="[-1, 4]", style=solid]; +"820 1576" -> "836 1625" [label="[-1, 4]", style=solid]; +"820 1576" -> "850 1588" [label="[-1, 4]", style=solid]; +"820 1576" -> "852 1581" [label="[-1, 4]", style=solid]; +"820 1576" -> "865 1615" [label="[-1, 4]", style=solid]; +"821 1605" -> "822 1607" [label="[-1, 4]", style=solid]; +"822 1607" -> "825 1608" [label="[-1]", style=solid]; +"823 1598" -> "824 1600" [label="[-1, 4]", style=solid]; +"824 1600" -> "825 1608" [label="[-1]", style=solid]; +"825 1608" -> "826 1610" [label="[-1]", style=solid]; +"826 1610" -> "827 1714" [label="[-1]", style=solid]; +"826 1610" -> "835 1629" [label="[-1]", style=solid]; +"826 1610" -> "841 1691" [label="[-1]", style=solid]; +"827 1714" -> "828 1715" [label="[-1]", style=solid]; +"828 1715" -> "833 1716" [label="[-1, 1]", style=solid]; +"829 1665" -> "830 1670" [label="[-1, 4]", style=solid]; +"830 1670" -> "831 1672" [label="[-1, 1]", style=solid]; +"831 1672" -> "832 1709" [label="[-1, 1]", style=solid]; +"832 1709" -> "833 1716" [label="[-1, 1]", style=solid]; +"833 1716" -> "834 1729" [label="[-1, 1]", style=solid]; +"833 1716" -> "879 1721" [label="[-1, 1]", style=solid]; +"834 
1729" -> "847 1730" [label="[-1, 1]", style=solid]; +"835 1629" -> "838 1630" [label="[-1]", style=solid]; +"836 1625" -> "837 1627" [label="[-1, 4]", style=solid]; +"837 1627" -> "838 1630" [label="[-1]", style=solid]; +"838 1630" -> "839 1698" [label="[-1]", style=solid]; +"839 1698" -> "840 1699" [label="[-1]", style=solid]; +"840 1699" -> "846 1700" [label="[-1, 1]", style=solid]; +"841 1691" -> "842 1692" [label="[-1]", style=solid]; +"842 1692" -> "845 1693" [label="[-1, 1]", style=solid]; +"843 1645" -> "844 1650" [label="[-1, 4]", style=solid]; +"844 1650" -> "845 1693" [label="[-1, 1]", style=solid]; +"845 1693" -> "846 1700" [label="[-1, 1]", style=solid]; +"846 1700" -> "847 1730" [label="[-1, 1]", style=solid]; +"846 1700" -> "880 1722" [label="[-1, 1]", style=solid]; +"847 1730" -> "848 1732" [label="[-1, 1]", style=solid]; +"848 1732" -> "849 1736" [label="[-1, 1]", style=solid]; +"849 1736" -> "885 1737" [label="[-1, 1, 1]", style=solid]; +"850 1588" -> "851 1590" [label="[-1, 4]", style=solid]; +"851 1590" -> "854 1591" [label="[-1]", style=solid]; +"852 1581" -> "853 1583" [label="[-1, 4]", style=solid]; +"853 1583" -> "854 1591" [label="[-1]", style=solid]; +"854 1591" -> "855 1593" [label="[-1]", style=solid]; +"855 1593" -> "856 1706" [label="[-1]", style=solid]; +"855 1593" -> "864 1619" [label="[-1]", style=solid]; +"855 1593" -> "870 1677" [label="[-1]", style=solid]; +"856 1706" -> "857 1707" [label="[-1]", style=solid]; +"857 1707" -> "862 1708" [label="[-1, 1]", style=solid]; +"858 1655" -> "859 1660" [label="[-1, 4]", style=solid]; +"859 1660" -> "860 1671" [label="[-1, 1]", style=solid]; +"860 1671" -> "861 1701" [label="[-1, 1]", style=solid]; +"861 1701" -> "862 1708" [label="[-1, 1]", style=solid]; +"862 1708" -> "863 1724" [label="[-1, 1]", style=solid]; +"862 1708" -> "882 1718" [label="[-1, 1]", style=solid]; +"863 1724" -> "876 1725" [label="[-1, 1]", style=solid]; +"864 1619" -> "867 1620" [label="[-1]", style=solid]; +"865 1615" -> "866 1617" [label="[-1, 4]", style=solid]; +"866 1617" -> "867 1620" [label="[-1]", style=solid]; +"867 1620" -> "868 1684" [label="[-1]", style=solid]; +"868 1684" -> "869 1685" [label="[-1]", style=solid]; +"869 1685" -> "875 1686" [label="[-1, 1]", style=solid]; +"870 1677" -> "871 1678" [label="[-1]", style=solid]; +"871 1678" -> "874 1679" [label="[-1, 1]", style=solid]; +"872 1635" -> "873 1640" [label="[-1, 4]", style=solid]; +"873 1640" -> "874 1679" [label="[-1, 1]", style=solid]; +"874 1679" -> "875 1686" [label="[-1, 1]", style=solid]; +"875 1686" -> "876 1725" [label="[-1, 1]", style=solid]; +"875 1686" -> "883 1719" [label="[-1, 1]", style=solid]; +"876 1725" -> "877 1727" [label="[-1, 1]", style=solid]; +"877 1727" -> "878 1735" [label="[-1, 1]", style=solid]; +"878 1735" -> "885 1737" [label="[-1, 1, 1]", style=solid]; +"879 1721" -> "880 1722" [label="[-1, 1]", style=solid]; +"880 1722" -> "881 1734" [label="[-1, 1]", style=solid]; +"881 1734" -> "885 1737" [label="[-1, 1, 1]", style=solid]; +"882 1718" -> "883 1719" [label="[-1, 1]", style=solid]; +"883 1719" -> "884 1733" [label="[-1, 1]", style=solid]; +"884 1733" -> "885 1737" [label="[-1, 1, 1]", style=solid]; +"885 1737" -> "886 1739" [label="[-1, 1, 4]", style=solid]; +"886 1739" -> "887 1746" [label="[]", style=solid]; +"887 1746" -> "888 1748" [label="[]", style=solid]; +"888 1748" -> "889 1766" [label="[]", style=solid]; +"888 1748" -> "893 1755" [label="[]", style=solid]; +"889 1766" -> "890 1771" [label="[]", style=solid]; +"890 1771" -> "891 1772" 
[label="[]", style=solid]; +"891 1772" -> "892 1774" [label="[]", style=solid]; +"892 1774" -> "897 1775" [label="[]", style=solid]; +"893 1755" -> "894 1760" [label="[]", style=solid]; +"894 1760" -> "895 1761" [label="[]", style=solid]; +"895 1761" -> "896 1773" [label="[]", style=solid]; +"896 1773" -> "897 1775" [label="[]", style=solid]; +"897 1775" -> "898 1777" [label="[]", style=solid]; +"898 1777" -> "899 1778" [label="[-1, 4]", style=solid]; +"898 1777" -> "922 1806" [label="[-1, 4]", style=solid]; +"899 1778" -> "900 1782" [label="[-1, 1]", style=solid]; +"899 1778" -> "902 1779" [label="[-1, 1]", style=solid]; +"899 1778" -> "904 1785" [label="[-1, 1]", style=solid]; +"900 1782" -> "901 1784" [label="[-1, 1]", style=solid]; +"901 1784" -> "904 1785" [label="[-1, 1]", style=solid]; +"902 1779" -> "903 1781" [label="[-1, 1]", style=solid]; +"903 1781" -> "904 1785" [label="[-1, 1]", style=solid]; +"904 1785" -> "905 1786" [label="[-1, 4]", style=solid]; +"905 1786" -> "906 1788" [label="[-1, 1]", style=solid]; +"905 1786" -> "909 1787" [label="[-1, 1]", style=solid]; +"906 1788" -> "907 1794" [label="[-1]", style=solid]; +"907 1794" -> "908 1795" [label="[-1]", style=dashed]; +"908 1795" -> "912 1799" [label="[-1]", style=dashed]; +"909 1787" -> "910 1790" [label="[-1]", style=solid]; +"910 1790" -> "911 1791" [label="[-1]", style=dashed]; +"911 1791" -> "912 1799" [label="[-1]", style=dashed]; +"912 1799" -> "913 1801" [label="[-1]", style=dashed]; +"913 1801" -> "914 1802" [label="[-1]", style=solid]; +"914 1802" -> "915 1803" [label="[1, -1]", style=dashed]; +"915 1803" -> "916 1804" [label="[-1, 1]", style=dashed]; +"916 1804" -> "918 1808" [label="[-1]", style=dashed]; +"916 1804" -> "921 1805" [label="[-1]", style=dashed]; +"917 1750" -> "918 1808" [label="[]", style=solid]; +"918 1808" -> "919 1810" [label="[]", style=solid]; +"918 1808" -> "928 1826" [label="[]", style=solid]; +"919 1810" -> "920 1811" [label="[]", style=solid]; +"920 1811" -> "924 1814" [label="[]", style=solid]; +"921 1805" -> "922 1806" [label="[-1]", style=dashed]; +"922 1806" -> "923 1809" [label="[-1, 4]", style=solid]; +"922 1806" -> "1435 1824" [label="[-1, 4]", style=solid]; +"923 1809" -> "924 1814" [label="[1, -1, 4]", style=solid]; +"924 1814" -> "925 1816" [label="[-1, 3]", style=dashed]; +"925 1816" -> "926 1817" [label="[-1, 1]", style=dashed]; +"926 1817" -> "927 1822" [label="[-1]", style=dashed]; +"927 1822" -> "928 1826" [label="[-1]", style=dashed]; +"927 1822" -> "1434 1823" [label="[-1]", style=dashed]; +"928 1826" -> "1421 2480" [label="[]", style=solid]; "929 1418" -> "956 1419" [label="[1]", style=dashed]; "930 1417" -> "956 1419" [label="[1]", style=dashed]; -"931 450" -> "948 451" [label="[1]", style=solid]; +"931 450" -> "948 451" [label="[1, 256, -1, -1]", style=solid]; "932 439" -> "933 442" [label="[4]", style=dashed]; "933 442" -> "934 443" [label="[2]", style=dashed]; "934 443" -> "945 445" [label="[2]", style=solid]; @@ -5022,24 +5022,24 @@ strict digraph { "944 438" -> "945 445" [label="[2]", style=solid]; "945 445" -> "946 446" [label="[2]", style=solid]; "946 446" -> "947 447" [label="[4]", style=solid]; -"947 447" -> "948 451" [label="[1]", style=solid]; -"948 451" -> "949 454" [label="[1]", style=solid]; -"948 451" -> "1178 471" [label="[1]", style=solid]; -"948 451" -> "1181 462" [label="[1]", style=solid]; -"948 451" -> "1185 456" [label="[1]", style=solid]; -"948 451" -> "1193 479" [label="[1]", style=solid]; -"949 454" -> "950 498" [label="[1]", style=solid]; 
-"949 454" -> "1010 524" [label="[1]", style=solid]; -"949 454" -> "1013 521" [label="[1]", style=solid]; -"949 454" -> "1529 2586" [label="[1]", style=solid]; -"949 454" -> "3434 6630" [label="[1]", style=solid]; -"950 498" -> "951 499" [label="[1]", style=solid]; -"951 499" -> "952 500" [label="[1]", style=solid]; -"951 499" -> "999 501" [label="[1]", style=solid]; -"952 500" -> "953 1176" [label="[1]", style=solid]; -"952 500" -> "964 1182" [label="[1]", style=solid]; -"952 500" -> "967 1179" [label="[1]", style=solid]; -"952 500" -> "974 1192" [label="[1]", style=solid]; +"947 447" -> "948 451" [label="[-1, -1, -1, -1]", style=solid]; +"948 451" -> "949 454" [label="[-1, 256, -1, -1]", style=solid]; +"948 451" -> "1178 471" [label="[-1, 256, -1, -1]", style=solid]; +"948 451" -> "1181 462" [label="[-1, 256, -1, -1]", style=solid]; +"948 451" -> "1185 456" [label="[-1, 256, -1, -1]", style=solid]; +"948 451" -> "1193 479" [label="[-1, 256, -1, -1]", style=solid]; +"949 454" -> "950 498" [label="[-1, 256, -1, -1]", style=solid]; +"949 454" -> "1010 524" [label="[-1, 256, -1, -1]", style=solid]; +"949 454" -> "1013 521" [label="[-1, 256, -1, -1]", style=solid]; +"949 454" -> "1529 2586" [label="[-1, 256, -1, -1]", style=solid]; +"949 454" -> "3434 6630" [label="[-1, 256, -1, -1]", style=solid]; +"950 498" -> "951 499" [label="[-1, 256, -1, -1]", style=solid]; +"951 499" -> "952 500" [label="[-1, 256, -1, -1]", style=solid]; +"951 499" -> "999 501" [label="[-1, 256, -1, -1]", style=solid]; +"952 500" -> "953 1176" [label="[-1, 3, -1, -1]", style=solid]; +"952 500" -> "964 1182" [label="[-1, 3, -1, -1]", style=solid]; +"952 500" -> "967 1179" [label="[-1, 3, -1, -1]", style=solid]; +"952 500" -> "974 1192" [label="[-1, 3, -1, -1]", style=solid]; "953 1176" -> "954 1177" [label="[4]", style=dashed]; "954 1177" -> "955 1416" [label="[]", style=dashed]; "954 1177" -> "958 1202" [label="[]", style=dashed]; @@ -5069,13 +5069,13 @@ strict digraph { "971 1187" -> "973 1191" [label="[1]", style=dashed]; "972 1186" -> "973 1191" [label="[1]", style=dashed]; "973 1191" -> "974 1192" [label="[5]", style=dashed]; -"974 1192" -> "975 1193" [label="[1]", style=solid]; -"975 1193" -> "976 1200" [label="[1]", style=solid]; -"976 1200" -> "977 1205" [label="[1]", style=solid]; -"977 1205" -> "978 1206" [label="[1]", style=solid]; -"978 1206" -> "979 1224" [label="[1]", style=solid]; -"978 1206" -> "987 1234" [label="[1]", style=solid]; -"979 1224" -> "980 1226" [label="[1]", style=dashed]; +"974 1192" -> "975 1193" [label="[]", style=solid]; +"975 1193" -> "976 1200" [label="[]", style=solid]; +"976 1200" -> "977 1205" [label="[]", style=solid]; +"977 1205" -> "978 1206" [label="[]", style=solid]; +"978 1206" -> "979 1224" [label="[]", style=solid]; +"978 1206" -> "987 1234" [label="[]", style=solid]; +"979 1224" -> "980 1226" [label="[-1]", style=dashed]; "980 1226" -> "981 1227" [label="[]", style=dashed]; "981 1227" -> "982 1229" [label="[1]", style=dashed]; "982 1229" -> "983 1230" [label="[2]", style=dashed]; @@ -5083,10 +5083,10 @@ strict digraph { "984 1231" -> "985 1232" [label="[]", style=dashed]; "985 1232" -> "986 1233" [label="[]", style=dashed]; "986 1233" -> "987 1234" [label="[1]", style=dashed]; -"987 1234" -> "988 1235" [label="[1]", style=dashed]; -"987 1234" -> "1163 1424" [label="[1]", style=solid]; -"988 1235" -> "1003 1236" [label="[1]", style=dashed]; -"988 1235" -> "1064 1245" [label="[1]", style=dashed]; +"987 1234" -> "988 1235" [label="[]", style=dashed]; +"987 1234" -> "1163 
1424" [label="[]", style=solid]; +"988 1235" -> "1003 1236" [label="[]", style=dashed]; +"988 1235" -> "1064 1245" [label="[]", style=dashed]; "989 1221" -> "992 1222" [label="[1]", style=dashed]; "990 1220" -> "992 1222" [label="[1]", style=dashed]; "991 1219" -> "992 1222" [label="[1]", style=dashed]; @@ -5097,16 +5097,16 @@ strict digraph { "996 1210" -> "998 1214" [label="[1]", style=dashed]; "997 1209" -> "998 1214" [label="[1]", style=dashed]; "998 1214" -> "1000 1215" [label="[5]", style=dashed]; -"999 501" -> "1000 1215" [label="[1]", style=solid]; -"1000 1215" -> "1001 1216" [label="[1]", style=solid]; -"1001 1216" -> "1002 1223" [label="[1]", style=solid]; -"1002 1223" -> "1003 1236" [label="[1]", style=solid]; -"1003 1236" -> "1004 1247" [label="[1]", style=solid]; -"1004 1247" -> "1005 1412" [label="[1]", style=solid]; -"1004 1247" -> "1075 1339" [label="[1]", style=solid]; -"1004 1247" -> "1089 1319" [label="[1]", style=solid]; -"1004 1247" -> "1104 1329" [label="[1]", style=solid]; -"1004 1247" -> "1118 1309" [label="[1]", style=solid]; +"999 501" -> "1000 1215" [label="[-1, 12, -1, -1]", style=solid]; +"1000 1215" -> "1001 1216" [label="[]", style=solid]; +"1001 1216" -> "1002 1223" [label="[]", style=solid]; +"1002 1223" -> "1003 1236" [label="[]", style=solid]; +"1003 1236" -> "1004 1247" [label="[]", style=solid]; +"1004 1247" -> "1005 1412" [label="[-1, 4]", style=solid]; +"1004 1247" -> "1075 1339" [label="[-1, 4]", style=solid]; +"1004 1247" -> "1089 1319" [label="[-1, 4]", style=solid]; +"1004 1247" -> "1104 1329" [label="[-1, 4]", style=solid]; +"1004 1247" -> "1118 1309" [label="[-1, 4]", style=solid]; "1005 1412" -> "1132 1413" [label="[2]", style=dashed]; "1006 1242" -> "1009 1243" [label="[1]", style=dashed]; "1007 1241" -> "1009 1243" [label="[1]", style=dashed]; @@ -5131,19 +5131,19 @@ strict digraph { "1020 623" -> "1021 624" [label="[]", style=dashed]; "1021 624" -> "1022 625" [label="[1]", style=dashed]; "1022 625" -> "1023 626" [label="[1]", style=dashed]; -"1023 626" -> "1024 627" [label="[1]", style=solid]; -"1024 627" -> "1025 628" [label="[1]", style=solid]; -"1025 628" -> "1026 629" [label="[1]", style=dashed]; -"1026 629" -> "1027 634" [label="[1]", style=dashed]; -"1027 634" -> "1028 635" [label="[1]", style=dashed]; -"1028 635" -> "1029 637" [label="[1]", style=solid]; -"1029 637" -> "1030 638" [label="[1]", style=solid]; -"1030 638" -> "1031 640" [label="[1]", style=solid]; -"1031 640" -> "1032 644" [label="[1]", style=solid]; -"1032 644" -> "1033 654" [label="[1]", style=solid]; -"1033 654" -> "1034 658" [label="[1]", style=solid]; -"1033 654" -> "1056 656" [label="[1]", style=solid]; -"1034 658" -> "1058 659" [label="[1]", style=solid]; +"1023 626" -> "1024 627" [label="[-1]", style=solid]; +"1024 627" -> "1025 628" [label="[-1]", style=solid]; +"1025 628" -> "1026 629" [label="[1, -1]", style=dashed]; +"1026 629" -> "1027 634" [label="[-1, 1]", style=dashed]; +"1027 634" -> "1028 635" [label="[]", style=dashed]; +"1028 635" -> "1029 637" [label="[]", style=solid]; +"1029 637" -> "1030 638" [label="[]", style=solid]; +"1030 638" -> "1031 640" [label="[]", style=solid]; +"1031 640" -> "1032 644" [label="[-1, 1]", style=solid]; +"1032 644" -> "1033 654" [label="[-1, -1]", style=solid]; +"1033 654" -> "1034 658" [label="[-1]", style=solid]; +"1033 654" -> "1056 656" [label="[-1]", style=solid]; +"1034 658" -> "1058 659" [label="[-1, 1]", style=solid]; "1035 648" -> "1037 649" [label="[1]", style=dashed]; "1036 647" -> "1037 649" [label="[1]", 
style=dashed]; "1037 649" -> "1053 650" [label="[2]", style=dashed]; @@ -5153,163 +5153,163 @@ strict digraph { "1041 606" -> "1042 607" [label="[]", style=dashed]; "1042 607" -> "1043 608" [label="[1]", style=dashed]; "1043 608" -> "1044 609" [label="[1]", style=dashed]; -"1044 609" -> "1045 610" [label="[1]", style=solid]; -"1045 610" -> "1046 611" [label="[1]", style=solid]; -"1046 611" -> "1047 612" [label="[1]", style=dashed]; -"1047 612" -> "1048 617" [label="[1]", style=dashed]; -"1048 617" -> "1049 618" [label="[1]", style=dashed]; -"1049 618" -> "1050 620" [label="[1]", style=solid]; -"1050 620" -> "1051 621" [label="[1]", style=solid]; -"1051 621" -> "1052 646" [label="[1]", style=solid]; -"1052 646" -> "1053 650" [label="[1]", style=solid]; -"1053 650" -> "1054 652" [label="[1]", style=solid]; -"1054 652" -> "1055 657" [label="[1]", style=solid]; -"1054 652" -> "1057 655" [label="[1]", style=solid]; -"1055 657" -> "1058 659" [label="[1]", style=solid]; -"1056 656" -> "1058 659" [label="[1]", style=solid]; -"1057 655" -> "1058 659" [label="[1]", style=solid]; -"1058 659" -> "1059 661" [label="[1]", style=solid]; -"1059 661" -> "1060 663" [label="[1]", style=solid]; -"1060 663" -> "1061 665" [label="[1]", style=solid]; -"1061 665" -> "1062 1237" [label="[1]", style=solid]; -"1062 1237" -> "1063 1244" [label="[1]", style=solid]; -"1063 1244" -> "1064 1245" [label="[1]", style=solid]; -"1064 1245" -> "1065 1249" [label="[1]", style=solid]; -"1065 1249" -> "1066 1250" [label="[1]", style=solid]; -"1066 1250" -> "1067 1279" [label="[1]", style=solid]; -"1066 1250" -> "1069 1272" [label="[1]", style=solid]; -"1066 1250" -> "1082 1299" [label="[1]", style=solid]; -"1066 1250" -> "1096 1262" [label="[1]", style=solid]; -"1066 1250" -> "1098 1255" [label="[1]", style=solid]; -"1066 1250" -> "1111 1289" [label="[1]", style=solid]; -"1067 1279" -> "1068 1281" [label="[1]", style=solid]; -"1068 1281" -> "1071 1282" [label="[1]", style=solid]; -"1069 1272" -> "1070 1274" [label="[1]", style=solid]; -"1070 1274" -> "1071 1282" [label="[1]", style=solid]; -"1071 1282" -> "1072 1284" [label="[1]", style=solid]; -"1072 1284" -> "1073 1388" [label="[1]", style=solid]; -"1072 1284" -> "1081 1303" [label="[1]", style=solid]; -"1072 1284" -> "1087 1365" [label="[1]", style=solid]; -"1073 1388" -> "1074 1389" [label="[1]", style=solid]; -"1074 1389" -> "1079 1390" [label="[1]", style=solid]; -"1075 1339" -> "1076 1344" [label="[1]", style=solid]; -"1076 1344" -> "1077 1346" [label="[1]", style=solid]; -"1077 1346" -> "1078 1383" [label="[1]", style=solid]; -"1078 1383" -> "1079 1390" [label="[1]", style=solid]; -"1079 1390" -> "1080 1403" [label="[1]", style=solid]; -"1079 1390" -> "1125 1395" [label="[1]", style=solid]; -"1080 1403" -> "1093 1404" [label="[1]", style=solid]; -"1081 1303" -> "1084 1304" [label="[1]", style=solid]; -"1082 1299" -> "1083 1301" [label="[1]", style=solid]; -"1083 1301" -> "1084 1304" [label="[1]", style=solid]; -"1084 1304" -> "1085 1372" [label="[1]", style=solid]; -"1085 1372" -> "1086 1373" [label="[1]", style=solid]; -"1086 1373" -> "1092 1374" [label="[1]", style=solid]; -"1087 1365" -> "1088 1366" [label="[1]", style=solid]; -"1088 1366" -> "1091 1367" [label="[1]", style=solid]; -"1089 1319" -> "1090 1324" [label="[1]", style=solid]; -"1090 1324" -> "1091 1367" [label="[1]", style=solid]; -"1091 1367" -> "1092 1374" [label="[1]", style=solid]; -"1092 1374" -> "1093 1404" [label="[1]", style=solid]; -"1092 1374" -> "1126 1396" [label="[1]", style=solid]; -"1093 
1404" -> "1094 1406" [label="[1]", style=solid]; -"1094 1406" -> "1095 1410" [label="[1]", style=solid]; -"1095 1410" -> "1131 1411" [label="[1]", style=solid]; -"1096 1262" -> "1097 1264" [label="[1]", style=solid]; -"1097 1264" -> "1100 1265" [label="[1]", style=solid]; -"1098 1255" -> "1099 1257" [label="[1]", style=solid]; -"1099 1257" -> "1100 1265" [label="[1]", style=solid]; -"1100 1265" -> "1101 1267" [label="[1]", style=solid]; -"1101 1267" -> "1102 1380" [label="[1]", style=solid]; -"1101 1267" -> "1110 1293" [label="[1]", style=solid]; -"1101 1267" -> "1116 1351" [label="[1]", style=solid]; -"1102 1380" -> "1103 1381" [label="[1]", style=solid]; -"1103 1381" -> "1108 1382" [label="[1]", style=solid]; -"1104 1329" -> "1105 1334" [label="[1]", style=solid]; -"1105 1334" -> "1106 1345" [label="[1]", style=solid]; -"1106 1345" -> "1107 1375" [label="[1]", style=solid]; -"1107 1375" -> "1108 1382" [label="[1]", style=solid]; -"1108 1382" -> "1109 1398" [label="[1]", style=solid]; -"1108 1382" -> "1128 1392" [label="[1]", style=solid]; -"1109 1398" -> "1122 1399" [label="[1]", style=solid]; -"1110 1293" -> "1113 1294" [label="[1]", style=solid]; -"1111 1289" -> "1112 1291" [label="[1]", style=solid]; -"1112 1291" -> "1113 1294" [label="[1]", style=solid]; -"1113 1294" -> "1114 1358" [label="[1]", style=solid]; -"1114 1358" -> "1115 1359" [label="[1]", style=solid]; -"1115 1359" -> "1121 1360" [label="[1]", style=solid]; -"1116 1351" -> "1117 1352" [label="[1]", style=solid]; -"1117 1352" -> "1120 1353" [label="[1]", style=solid]; -"1118 1309" -> "1119 1314" [label="[1]", style=solid]; -"1119 1314" -> "1120 1353" [label="[1]", style=solid]; -"1120 1353" -> "1121 1360" [label="[1]", style=solid]; -"1121 1360" -> "1122 1399" [label="[1]", style=solid]; -"1121 1360" -> "1129 1393" [label="[1]", style=solid]; -"1122 1399" -> "1123 1401" [label="[1]", style=solid]; -"1123 1401" -> "1124 1409" [label="[1]", style=solid]; -"1124 1409" -> "1131 1411" [label="[1]", style=solid]; -"1125 1395" -> "1126 1396" [label="[1]", style=solid]; -"1126 1396" -> "1127 1408" [label="[1]", style=solid]; -"1127 1408" -> "1131 1411" [label="[1]", style=solid]; -"1128 1392" -> "1129 1393" [label="[1]", style=solid]; -"1129 1393" -> "1130 1407" [label="[1]", style=solid]; -"1130 1407" -> "1131 1411" [label="[1]", style=solid]; -"1131 1411" -> "1132 1413" [label="[1]", style=solid]; -"1132 1413" -> "1133 1420" [label="[1]", style=solid]; -"1133 1420" -> "1134 1422" [label="[1]", style=solid]; -"1134 1422" -> "1135 1440" [label="[1]", style=solid]; -"1134 1422" -> "1139 1429" [label="[1]", style=solid]; -"1135 1440" -> "1136 1445" [label="[1]", style=solid]; -"1136 1445" -> "1137 1446" [label="[1]", style=solid]; -"1137 1446" -> "1138 1448" [label="[1]", style=solid]; -"1138 1448" -> "1143 1449" [label="[1]", style=solid]; -"1139 1429" -> "1140 1434" [label="[1]", style=solid]; -"1140 1434" -> "1141 1435" [label="[1]", style=solid]; -"1141 1435" -> "1142 1447" [label="[1]", style=solid]; -"1142 1447" -> "1143 1449" [label="[1]", style=solid]; -"1143 1449" -> "1144 1451" [label="[1]", style=solid]; -"1144 1451" -> "1145 1452" [label="[1]", style=solid]; -"1144 1451" -> "1168 1480" [label="[1]", style=solid]; -"1145 1452" -> "1146 1456" [label="[1]", style=solid]; -"1145 1452" -> "1148 1453" [label="[1]", style=solid]; -"1145 1452" -> "1150 1459" [label="[1]", style=solid]; -"1146 1456" -> "1147 1458" [label="[1]", style=solid]; -"1147 1458" -> "1150 1459" [label="[1]", style=solid]; -"1148 1453" -> "1149 1455" 
[label="[1]", style=solid]; -"1149 1455" -> "1150 1459" [label="[1]", style=solid]; -"1150 1459" -> "1151 1460" [label="[1]", style=solid]; -"1151 1460" -> "1152 1462" [label="[1]", style=solid]; -"1151 1460" -> "1155 1461" [label="[1]", style=solid]; -"1152 1462" -> "1153 1468" [label="[1]", style=solid]; -"1153 1468" -> "1154 1469" [label="[1]", style=dashed]; -"1154 1469" -> "1158 1473" [label="[1]", style=dashed]; -"1155 1461" -> "1156 1464" [label="[1]", style=solid]; -"1156 1464" -> "1157 1465" [label="[1]", style=dashed]; -"1157 1465" -> "1158 1473" [label="[1]", style=dashed]; -"1158 1473" -> "1159 1475" [label="[1]", style=dashed]; -"1159 1475" -> "1160 1476" [label="[1]", style=solid]; -"1160 1476" -> "1161 1477" [label="[1]", style=dashed]; -"1161 1477" -> "1162 1478" [label="[1]", style=dashed]; -"1162 1478" -> "1164 1482" [label="[1]", style=dashed]; -"1162 1478" -> "1167 1479" [label="[1]", style=dashed]; -"1163 1424" -> "1164 1482" [label="[1]", style=solid]; -"1164 1482" -> "1165 1484" [label="[1]", style=solid]; -"1164 1482" -> "1174 1500" [label="[1]", style=solid]; -"1165 1484" -> "1166 1485" [label="[1]", style=solid]; -"1166 1485" -> "1170 1488" [label="[1]", style=solid]; -"1167 1479" -> "1168 1480" [label="[1]", style=dashed]; -"1168 1480" -> "1169 1483" [label="[1]", style=solid]; -"1168 1480" -> "1437 1498" [label="[1]", style=solid]; -"1169 1483" -> "1170 1488" [label="[1]", style=solid]; -"1170 1488" -> "1171 1490" [label="[1]", style=dashed]; -"1171 1490" -> "1172 1491" [label="[1]", style=dashed]; -"1172 1491" -> "1173 1496" [label="[1]", style=dashed]; -"1173 1496" -> "1174 1500" [label="[1]", style=dashed]; -"1173 1496" -> "1436 1497" [label="[1]", style=dashed]; -"1174 1500" -> "1421 2480" [label="[1]", style=solid]; +"1044 609" -> "1045 610" [label="[-1]", style=solid]; +"1045 610" -> "1046 611" [label="[-1]", style=solid]; +"1046 611" -> "1047 612" [label="[1, -1]", style=dashed]; +"1047 612" -> "1048 617" [label="[-1, 1]", style=dashed]; +"1048 617" -> "1049 618" [label="[]", style=dashed]; +"1049 618" -> "1050 620" [label="[]", style=solid]; +"1050 620" -> "1051 621" [label="[]", style=solid]; +"1051 621" -> "1052 646" [label="[]", style=solid]; +"1052 646" -> "1053 650" [label="[1, -1]", style=solid]; +"1053 650" -> "1054 652" [label="[-1, -1]", style=solid]; +"1054 652" -> "1055 657" [label="[-1]", style=solid]; +"1054 652" -> "1057 655" [label="[-1]", style=solid]; +"1055 657" -> "1058 659" [label="[-1, 1]", style=solid]; +"1056 656" -> "1058 659" [label="[-1, 1]", style=solid]; +"1057 655" -> "1058 659" [label="[-1, 1]", style=solid]; +"1058 659" -> "1059 661" [label="[-1, 4]", style=solid]; +"1059 661" -> "1060 663" [label="[-1, 1, 4]", style=solid]; +"1060 663" -> "1061 665" [label="[-1, 3, 4]", style=solid]; +"1061 665" -> "1062 1237" [label="[-1, 4]", style=solid]; +"1062 1237" -> "1063 1244" [label="[-1, 4]", style=solid]; +"1063 1244" -> "1064 1245" [label="[]", style=solid]; +"1064 1245" -> "1065 1249" [label="[]", style=solid]; +"1065 1249" -> "1066 1250" [label="[-1, 4]", style=solid]; +"1066 1250" -> "1067 1279" [label="[-1, 4]", style=solid]; +"1066 1250" -> "1069 1272" [label="[-1, 4]", style=solid]; +"1066 1250" -> "1082 1299" [label="[-1, 4]", style=solid]; +"1066 1250" -> "1096 1262" [label="[-1, 4]", style=solid]; +"1066 1250" -> "1098 1255" [label="[-1, 4]", style=solid]; +"1066 1250" -> "1111 1289" [label="[-1, 4]", style=solid]; +"1067 1279" -> "1068 1281" [label="[-1, 4]", style=solid]; +"1068 1281" -> "1071 1282" [label="[-1]", 
style=solid]; +"1069 1272" -> "1070 1274" [label="[-1, 4]", style=solid]; +"1070 1274" -> "1071 1282" [label="[-1]", style=solid]; +"1071 1282" -> "1072 1284" [label="[-1]", style=solid]; +"1072 1284" -> "1073 1388" [label="[-1]", style=solid]; +"1072 1284" -> "1081 1303" [label="[-1]", style=solid]; +"1072 1284" -> "1087 1365" [label="[-1]", style=solid]; +"1073 1388" -> "1074 1389" [label="[-1]", style=solid]; +"1074 1389" -> "1079 1390" [label="[-1, 1]", style=solid]; +"1075 1339" -> "1076 1344" [label="[-1, 4]", style=solid]; +"1076 1344" -> "1077 1346" [label="[-1, 1]", style=solid]; +"1077 1346" -> "1078 1383" [label="[-1, 1]", style=solid]; +"1078 1383" -> "1079 1390" [label="[-1, 1]", style=solid]; +"1079 1390" -> "1080 1403" [label="[-1, 1]", style=solid]; +"1079 1390" -> "1125 1395" [label="[-1, 1]", style=solid]; +"1080 1403" -> "1093 1404" [label="[-1, 1]", style=solid]; +"1081 1303" -> "1084 1304" [label="[-1]", style=solid]; +"1082 1299" -> "1083 1301" [label="[-1, 4]", style=solid]; +"1083 1301" -> "1084 1304" [label="[-1]", style=solid]; +"1084 1304" -> "1085 1372" [label="[-1]", style=solid]; +"1085 1372" -> "1086 1373" [label="[-1]", style=solid]; +"1086 1373" -> "1092 1374" [label="[-1, 1]", style=solid]; +"1087 1365" -> "1088 1366" [label="[-1]", style=solid]; +"1088 1366" -> "1091 1367" [label="[-1, 1]", style=solid]; +"1089 1319" -> "1090 1324" [label="[-1, 4]", style=solid]; +"1090 1324" -> "1091 1367" [label="[-1, 1]", style=solid]; +"1091 1367" -> "1092 1374" [label="[-1, 1]", style=solid]; +"1092 1374" -> "1093 1404" [label="[-1, 1]", style=solid]; +"1092 1374" -> "1126 1396" [label="[-1, 1]", style=solid]; +"1093 1404" -> "1094 1406" [label="[-1, 1]", style=solid]; +"1094 1406" -> "1095 1410" [label="[-1, 1]", style=solid]; +"1095 1410" -> "1131 1411" [label="[-1, 1, 1]", style=solid]; +"1096 1262" -> "1097 1264" [label="[-1, 4]", style=solid]; +"1097 1264" -> "1100 1265" [label="[-1]", style=solid]; +"1098 1255" -> "1099 1257" [label="[-1, 4]", style=solid]; +"1099 1257" -> "1100 1265" [label="[-1]", style=solid]; +"1100 1265" -> "1101 1267" [label="[-1]", style=solid]; +"1101 1267" -> "1102 1380" [label="[-1]", style=solid]; +"1101 1267" -> "1110 1293" [label="[-1]", style=solid]; +"1101 1267" -> "1116 1351" [label="[-1]", style=solid]; +"1102 1380" -> "1103 1381" [label="[-1]", style=solid]; +"1103 1381" -> "1108 1382" [label="[-1, 1]", style=solid]; +"1104 1329" -> "1105 1334" [label="[-1, 4]", style=solid]; +"1105 1334" -> "1106 1345" [label="[-1, 1]", style=solid]; +"1106 1345" -> "1107 1375" [label="[-1, 1]", style=solid]; +"1107 1375" -> "1108 1382" [label="[-1, 1]", style=solid]; +"1108 1382" -> "1109 1398" [label="[-1, 1]", style=solid]; +"1108 1382" -> "1128 1392" [label="[-1, 1]", style=solid]; +"1109 1398" -> "1122 1399" [label="[-1, 1]", style=solid]; +"1110 1293" -> "1113 1294" [label="[-1]", style=solid]; +"1111 1289" -> "1112 1291" [label="[-1, 4]", style=solid]; +"1112 1291" -> "1113 1294" [label="[-1]", style=solid]; +"1113 1294" -> "1114 1358" [label="[-1]", style=solid]; +"1114 1358" -> "1115 1359" [label="[-1]", style=solid]; +"1115 1359" -> "1121 1360" [label="[-1, 1]", style=solid]; +"1116 1351" -> "1117 1352" [label="[-1]", style=solid]; +"1117 1352" -> "1120 1353" [label="[-1, 1]", style=solid]; +"1118 1309" -> "1119 1314" [label="[-1, 4]", style=solid]; +"1119 1314" -> "1120 1353" [label="[-1, 1]", style=solid]; +"1120 1353" -> "1121 1360" [label="[-1, 1]", style=solid]; +"1121 1360" -> "1122 1399" [label="[-1, 1]", style=solid]; +"1121 
1360" -> "1129 1393" [label="[-1, 1]", style=solid]; +"1122 1399" -> "1123 1401" [label="[-1, 1]", style=solid]; +"1123 1401" -> "1124 1409" [label="[-1, 1]", style=solid]; +"1124 1409" -> "1131 1411" [label="[-1, 1, 1]", style=solid]; +"1125 1395" -> "1126 1396" [label="[-1, 1]", style=solid]; +"1126 1396" -> "1127 1408" [label="[-1, 1]", style=solid]; +"1127 1408" -> "1131 1411" [label="[-1, 1, 1]", style=solid]; +"1128 1392" -> "1129 1393" [label="[-1, 1]", style=solid]; +"1129 1393" -> "1130 1407" [label="[-1, 1]", style=solid]; +"1130 1407" -> "1131 1411" [label="[-1, 1, 1]", style=solid]; +"1131 1411" -> "1132 1413" [label="[-1, 1, 4]", style=solid]; +"1132 1413" -> "1133 1420" [label="[]", style=solid]; +"1133 1420" -> "1134 1422" [label="[]", style=solid]; +"1134 1422" -> "1135 1440" [label="[]", style=solid]; +"1134 1422" -> "1139 1429" [label="[]", style=solid]; +"1135 1440" -> "1136 1445" [label="[]", style=solid]; +"1136 1445" -> "1137 1446" [label="[]", style=solid]; +"1137 1446" -> "1138 1448" [label="[]", style=solid]; +"1138 1448" -> "1143 1449" [label="[]", style=solid]; +"1139 1429" -> "1140 1434" [label="[]", style=solid]; +"1140 1434" -> "1141 1435" [label="[]", style=solid]; +"1141 1435" -> "1142 1447" [label="[]", style=solid]; +"1142 1447" -> "1143 1449" [label="[]", style=solid]; +"1143 1449" -> "1144 1451" [label="[]", style=solid]; +"1144 1451" -> "1145 1452" [label="[-1, 4]", style=solid]; +"1144 1451" -> "1168 1480" [label="[-1, 4]", style=solid]; +"1145 1452" -> "1146 1456" [label="[-1, 1]", style=solid]; +"1145 1452" -> "1148 1453" [label="[-1, 1]", style=solid]; +"1145 1452" -> "1150 1459" [label="[-1, 1]", style=solid]; +"1146 1456" -> "1147 1458" [label="[-1, 1]", style=solid]; +"1147 1458" -> "1150 1459" [label="[-1, 1]", style=solid]; +"1148 1453" -> "1149 1455" [label="[-1, 1]", style=solid]; +"1149 1455" -> "1150 1459" [label="[-1, 1]", style=solid]; +"1150 1459" -> "1151 1460" [label="[-1, 4]", style=solid]; +"1151 1460" -> "1152 1462" [label="[-1, 1]", style=solid]; +"1151 1460" -> "1155 1461" [label="[-1, 1]", style=solid]; +"1152 1462" -> "1153 1468" [label="[-1]", style=solid]; +"1153 1468" -> "1154 1469" [label="[-1]", style=dashed]; +"1154 1469" -> "1158 1473" [label="[-1]", style=dashed]; +"1155 1461" -> "1156 1464" [label="[-1]", style=solid]; +"1156 1464" -> "1157 1465" [label="[-1]", style=dashed]; +"1157 1465" -> "1158 1473" [label="[-1]", style=dashed]; +"1158 1473" -> "1159 1475" [label="[-1]", style=dashed]; +"1159 1475" -> "1160 1476" [label="[-1]", style=solid]; +"1160 1476" -> "1161 1477" [label="[1, -1]", style=dashed]; +"1161 1477" -> "1162 1478" [label="[-1, 1]", style=dashed]; +"1162 1478" -> "1164 1482" [label="[-1]", style=dashed]; +"1162 1478" -> "1167 1479" [label="[-1]", style=dashed]; +"1163 1424" -> "1164 1482" [label="[]", style=solid]; +"1164 1482" -> "1165 1484" [label="[]", style=solid]; +"1164 1482" -> "1174 1500" [label="[]", style=solid]; +"1165 1484" -> "1166 1485" [label="[]", style=solid]; +"1166 1485" -> "1170 1488" [label="[]", style=solid]; +"1167 1479" -> "1168 1480" [label="[-1]", style=dashed]; +"1168 1480" -> "1169 1483" [label="[-1, 4]", style=solid]; +"1168 1480" -> "1437 1498" [label="[-1, 4]", style=solid]; +"1169 1483" -> "1170 1488" [label="[1, -1, 4]", style=solid]; +"1170 1488" -> "1171 1490" [label="[-1, 3]", style=dashed]; +"1171 1490" -> "1172 1491" [label="[-1, 1]", style=dashed]; +"1172 1491" -> "1173 1496" [label="[-1]", style=dashed]; +"1173 1496" -> "1174 1500" [label="[-1]", style=dashed]; 
+"1173 1496" -> "1436 1497" [label="[-1]", style=dashed]; +"1174 1500" -> "1421 2480" [label="[]", style=solid]; "1175 1092" -> "1202 1093" [label="[1]", style=dashed]; "1176 1091" -> "1202 1093" [label="[1]", style=dashed]; -"1177 482" -> "1194 483" [label="[1]", style=solid]; +"1177 482" -> "1194 483" [label="[1, 256, -1, -1]", style=solid]; "1178 471" -> "1179 474" [label="[4]", style=dashed]; "1179 474" -> "1180 475" [label="[2]", style=dashed]; "1180 475" -> "1191 477" [label="[2]", style=solid]; @@ -5325,20 +5325,20 @@ strict digraph { "1190 470" -> "1191 477" [label="[2]", style=solid]; "1191 477" -> "1192 478" [label="[2]", style=solid]; "1192 478" -> "1193 479" [label="[4]", style=solid]; -"1193 479" -> "1194 483" [label="[1]", style=solid]; -"1194 483" -> "1195 486" [label="[1]", style=solid]; -"1195 486" -> "1196 490" [label="[1]", style=solid]; -"1195 486" -> "1256 518" [label="[1]", style=solid]; -"1195 486" -> "1259 515" [label="[1]", style=solid]; -"1195 486" -> "1481 2569" [label="[1]", style=solid]; -"1195 486" -> "3386 6613" [label="[1]", style=solid]; -"1196 490" -> "1197 491" [label="[1]", style=solid]; -"1197 491" -> "1198 494" [label="[1]", style=solid]; -"1197 491" -> "1245 497" [label="[1]", style=solid]; -"1198 494" -> "1199 850" [label="[1]", style=solid]; -"1198 494" -> "1210 856" [label="[1]", style=solid]; -"1198 494" -> "1213 853" [label="[1]", style=solid]; -"1198 494" -> "1220 866" [label="[1]", style=solid]; +"1193 479" -> "1194 483" [label="[-1, -1, -1, -1]", style=solid]; +"1194 483" -> "1195 486" [label="[-1, 256, -1, -1]", style=solid]; +"1195 486" -> "1196 490" [label="[-1, 256, -1, -1]", style=solid]; +"1195 486" -> "1256 518" [label="[-1, 256, -1, -1]", style=solid]; +"1195 486" -> "1259 515" [label="[-1, 256, -1, -1]", style=solid]; +"1195 486" -> "1481 2569" [label="[-1, 256, -1, -1]", style=solid]; +"1195 486" -> "3386 6613" [label="[-1, 256, -1, -1]", style=solid]; +"1196 490" -> "1197 491" [label="[-1, 256, -1, -1]", style=solid]; +"1197 491" -> "1198 494" [label="[-1, 256, -1, -1]", style=solid]; +"1197 491" -> "1245 497" [label="[-1, 256, -1, -1]", style=solid]; +"1198 494" -> "1199 850" [label="[-1, 3, -1, -1]", style=solid]; +"1198 494" -> "1210 856" [label="[-1, 3, -1, -1]", style=solid]; +"1198 494" -> "1213 853" [label="[-1, 3, -1, -1]", style=solid]; +"1198 494" -> "1220 866" [label="[-1, 3, -1, -1]", style=solid]; "1199 850" -> "1200 851" [label="[4]", style=dashed]; "1200 851" -> "1201 1090" [label="[]", style=dashed]; "1200 851" -> "1204 876" [label="[]", style=dashed]; @@ -5368,13 +5368,13 @@ strict digraph { "1217 861" -> "1219 865" [label="[1]", style=dashed]; "1218 860" -> "1219 865" [label="[1]", style=dashed]; "1219 865" -> "1220 866" [label="[5]", style=dashed]; -"1220 866" -> "1221 867" [label="[1]", style=solid]; -"1221 867" -> "1222 874" [label="[1]", style=solid]; -"1222 874" -> "1223 879" [label="[1]", style=solid]; -"1223 879" -> "1224 880" [label="[1]", style=solid]; -"1224 880" -> "1225 898" [label="[1]", style=solid]; -"1224 880" -> "1233 908" [label="[1]", style=solid]; -"1225 898" -> "1226 900" [label="[1]", style=dashed]; +"1220 866" -> "1221 867" [label="[]", style=solid]; +"1221 867" -> "1222 874" [label="[]", style=solid]; +"1222 874" -> "1223 879" [label="[]", style=solid]; +"1223 879" -> "1224 880" [label="[]", style=solid]; +"1224 880" -> "1225 898" [label="[]", style=solid]; +"1224 880" -> "1233 908" [label="[]", style=solid]; +"1225 898" -> "1226 900" [label="[-1]", style=dashed]; "1226 900" -> "1227 901" 
[label="[]", style=dashed]; "1227 901" -> "1228 903" [label="[1]", style=dashed]; "1228 903" -> "1229 904" [label="[2]", style=dashed]; @@ -5382,10 +5382,10 @@ strict digraph { "1230 905" -> "1231 906" [label="[]", style=dashed]; "1231 906" -> "1232 907" [label="[]", style=dashed]; "1232 907" -> "1233 908" [label="[1]", style=dashed]; -"1233 908" -> "1234 909" [label="[1]", style=dashed]; -"1233 908" -> "1409 1098" [label="[1]", style=solid]; -"1234 909" -> "1249 910" [label="[1]", style=dashed]; -"1234 909" -> "1310 919" [label="[1]", style=dashed]; +"1233 908" -> "1234 909" [label="[]", style=dashed]; +"1233 908" -> "1409 1098" [label="[]", style=solid]; +"1234 909" -> "1249 910" [label="[]", style=dashed]; +"1234 909" -> "1310 919" [label="[]", style=dashed]; "1235 895" -> "1238 896" [label="[1]", style=dashed]; "1236 894" -> "1238 896" [label="[1]", style=dashed]; "1237 893" -> "1238 896" [label="[1]", style=dashed]; @@ -5396,16 +5396,16 @@ strict digraph { "1242 884" -> "1244 888" [label="[1]", style=dashed]; "1243 883" -> "1244 888" [label="[1]", style=dashed]; "1244 888" -> "1246 889" [label="[5]", style=dashed]; -"1245 497" -> "1246 889" [label="[1]", style=solid]; -"1246 889" -> "1247 890" [label="[1]", style=solid]; -"1247 890" -> "1248 897" [label="[1]", style=solid]; -"1248 897" -> "1249 910" [label="[1]", style=solid]; -"1249 910" -> "1250 921" [label="[1]", style=solid]; -"1250 921" -> "1251 1086" [label="[1]", style=solid]; -"1250 921" -> "1321 1013" [label="[1]", style=solid]; -"1250 921" -> "1335 993" [label="[1]", style=solid]; -"1250 921" -> "1350 1003" [label="[1]", style=solid]; -"1250 921" -> "1364 983" [label="[1]", style=solid]; +"1245 497" -> "1246 889" [label="[-1, 12, -1, -1]", style=solid]; +"1246 889" -> "1247 890" [label="[]", style=solid]; +"1247 890" -> "1248 897" [label="[]", style=solid]; +"1248 897" -> "1249 910" [label="[]", style=solid]; +"1249 910" -> "1250 921" [label="[]", style=solid]; +"1250 921" -> "1251 1086" [label="[-1, 4]", style=solid]; +"1250 921" -> "1321 1013" [label="[-1, 4]", style=solid]; +"1250 921" -> "1335 993" [label="[-1, 4]", style=solid]; +"1250 921" -> "1350 1003" [label="[-1, 4]", style=solid]; +"1250 921" -> "1364 983" [label="[-1, 4]", style=solid]; "1251 1086" -> "1378 1087" [label="[2]", style=dashed]; "1252 916" -> "1255 917" [label="[1]", style=dashed]; "1253 915" -> "1255 917" [label="[1]", style=dashed]; @@ -5430,19 +5430,19 @@ strict digraph { "1266 562" -> "1267 563" [label="[]", style=dashed]; "1267 563" -> "1268 564" [label="[1]", style=dashed]; "1268 564" -> "1269 565" [label="[1]", style=dashed]; -"1269 565" -> "1270 566" [label="[1]", style=solid]; -"1270 566" -> "1271 567" [label="[1]", style=solid]; -"1271 567" -> "1272 568" [label="[1]", style=dashed]; -"1272 568" -> "1273 573" [label="[1]", style=dashed]; -"1273 573" -> "1274 574" [label="[1]", style=dashed]; -"1274 574" -> "1275 576" [label="[1]", style=solid]; -"1275 576" -> "1276 577" [label="[1]", style=solid]; -"1276 577" -> "1277 579" [label="[1]", style=solid]; -"1277 579" -> "1278 583" [label="[1]", style=solid]; -"1278 583" -> "1279 593" [label="[1]", style=solid]; -"1279 593" -> "1280 597" [label="[1]", style=solid]; -"1279 593" -> "1302 595" [label="[1]", style=solid]; -"1280 597" -> "1304 598" [label="[1]", style=solid]; +"1269 565" -> "1270 566" [label="[-1]", style=solid]; +"1270 566" -> "1271 567" [label="[-1]", style=solid]; +"1271 567" -> "1272 568" [label="[1, -1]", style=dashed]; +"1272 568" -> "1273 573" [label="[-1, 1]", style=dashed]; 
+"1273 573" -> "1274 574" [label="[]", style=dashed]; +"1274 574" -> "1275 576" [label="[]", style=solid]; +"1275 576" -> "1276 577" [label="[]", style=solid]; +"1276 577" -> "1277 579" [label="[]", style=solid]; +"1277 579" -> "1278 583" [label="[-1, 1]", style=solid]; +"1278 583" -> "1279 593" [label="[-1, -1]", style=solid]; +"1279 593" -> "1280 597" [label="[-1]", style=solid]; +"1279 593" -> "1302 595" [label="[-1]", style=solid]; +"1280 597" -> "1304 598" [label="[-1, 1]", style=solid]; "1281 587" -> "1283 588" [label="[1]", style=dashed]; "1282 586" -> "1283 588" [label="[1]", style=dashed]; "1283 588" -> "1299 589" [label="[2]", style=dashed]; @@ -5452,247 +5452,247 @@ strict digraph { "1287 545" -> "1288 546" [label="[]", style=dashed]; "1288 546" -> "1289 547" [label="[1]", style=dashed]; "1289 547" -> "1290 548" [label="[1]", style=dashed]; -"1290 548" -> "1291 549" [label="[1]", style=solid]; -"1291 549" -> "1292 550" [label="[1]", style=solid]; -"1292 550" -> "1293 551" [label="[1]", style=dashed]; -"1293 551" -> "1294 556" [label="[1]", style=dashed]; -"1294 556" -> "1295 557" [label="[1]", style=dashed]; -"1295 557" -> "1296 559" [label="[1]", style=solid]; -"1296 559" -> "1297 560" [label="[1]", style=solid]; -"1297 560" -> "1298 585" [label="[1]", style=solid]; -"1298 585" -> "1299 589" [label="[1]", style=solid]; -"1299 589" -> "1300 591" [label="[1]", style=solid]; -"1300 591" -> "1301 596" [label="[1]", style=solid]; -"1300 591" -> "1303 594" [label="[1]", style=solid]; -"1301 596" -> "1304 598" [label="[1]", style=solid]; -"1302 595" -> "1304 598" [label="[1]", style=solid]; -"1303 594" -> "1304 598" [label="[1]", style=solid]; -"1304 598" -> "1305 600" [label="[1]", style=solid]; -"1305 600" -> "1306 602" [label="[1]", style=solid]; -"1306 602" -> "1307 604" [label="[1]", style=solid]; -"1307 604" -> "1308 911" [label="[1]", style=solid]; -"1308 911" -> "1309 918" [label="[1]", style=solid]; -"1309 918" -> "1310 919" [label="[1]", style=solid]; -"1310 919" -> "1311 923" [label="[1]", style=solid]; -"1311 923" -> "1312 924" [label="[1]", style=solid]; -"1312 924" -> "1313 953" [label="[1]", style=solid]; -"1312 924" -> "1315 946" [label="[1]", style=solid]; -"1312 924" -> "1328 973" [label="[1]", style=solid]; -"1312 924" -> "1342 936" [label="[1]", style=solid]; -"1312 924" -> "1344 929" [label="[1]", style=solid]; -"1312 924" -> "1357 963" [label="[1]", style=solid]; -"1313 953" -> "1314 955" [label="[1]", style=solid]; -"1314 955" -> "1317 956" [label="[1]", style=solid]; -"1315 946" -> "1316 948" [label="[1]", style=solid]; -"1316 948" -> "1317 956" [label="[1]", style=solid]; -"1317 956" -> "1318 958" [label="[1]", style=solid]; -"1318 958" -> "1319 1062" [label="[1]", style=solid]; -"1318 958" -> "1327 977" [label="[1]", style=solid]; -"1318 958" -> "1333 1039" [label="[1]", style=solid]; -"1319 1062" -> "1320 1063" [label="[1]", style=solid]; -"1320 1063" -> "1325 1064" [label="[1]", style=solid]; -"1321 1013" -> "1322 1018" [label="[1]", style=solid]; -"1322 1018" -> "1323 1020" [label="[1]", style=solid]; -"1323 1020" -> "1324 1057" [label="[1]", style=solid]; -"1324 1057" -> "1325 1064" [label="[1]", style=solid]; -"1325 1064" -> "1326 1077" [label="[1]", style=solid]; -"1325 1064" -> "1371 1069" [label="[1]", style=solid]; -"1326 1077" -> "1339 1078" [label="[1]", style=solid]; -"1327 977" -> "1330 978" [label="[1]", style=solid]; -"1328 973" -> "1329 975" [label="[1]", style=solid]; -"1329 975" -> "1330 978" [label="[1]", style=solid]; -"1330 978" -> "1331 
1046" [label="[1]", style=solid]; -"1331 1046" -> "1332 1047" [label="[1]", style=solid]; -"1332 1047" -> "1338 1048" [label="[1]", style=solid]; -"1333 1039" -> "1334 1040" [label="[1]", style=solid]; -"1334 1040" -> "1337 1041" [label="[1]", style=solid]; -"1335 993" -> "1336 998" [label="[1]", style=solid]; -"1336 998" -> "1337 1041" [label="[1]", style=solid]; -"1337 1041" -> "1338 1048" [label="[1]", style=solid]; -"1338 1048" -> "1339 1078" [label="[1]", style=solid]; -"1338 1048" -> "1372 1070" [label="[1]", style=solid]; -"1339 1078" -> "1340 1080" [label="[1]", style=solid]; -"1340 1080" -> "1341 1084" [label="[1]", style=solid]; -"1341 1084" -> "1377 1085" [label="[1]", style=solid]; -"1342 936" -> "1343 938" [label="[1]", style=solid]; -"1343 938" -> "1346 939" [label="[1]", style=solid]; -"1344 929" -> "1345 931" [label="[1]", style=solid]; -"1345 931" -> "1346 939" [label="[1]", style=solid]; -"1346 939" -> "1347 941" [label="[1]", style=solid]; -"1347 941" -> "1348 1054" [label="[1]", style=solid]; -"1347 941" -> "1356 967" [label="[1]", style=solid]; -"1347 941" -> "1362 1025" [label="[1]", style=solid]; -"1348 1054" -> "1349 1055" [label="[1]", style=solid]; -"1349 1055" -> "1354 1056" [label="[1]", style=solid]; -"1350 1003" -> "1351 1008" [label="[1]", style=solid]; -"1351 1008" -> "1352 1019" [label="[1]", style=solid]; -"1352 1019" -> "1353 1049" [label="[1]", style=solid]; -"1353 1049" -> "1354 1056" [label="[1]", style=solid]; -"1354 1056" -> "1355 1072" [label="[1]", style=solid]; -"1354 1056" -> "1374 1066" [label="[1]", style=solid]; -"1355 1072" -> "1368 1073" [label="[1]", style=solid]; -"1356 967" -> "1359 968" [label="[1]", style=solid]; -"1357 963" -> "1358 965" [label="[1]", style=solid]; -"1358 965" -> "1359 968" [label="[1]", style=solid]; -"1359 968" -> "1360 1032" [label="[1]", style=solid]; -"1360 1032" -> "1361 1033" [label="[1]", style=solid]; -"1361 1033" -> "1367 1034" [label="[1]", style=solid]; -"1362 1025" -> "1363 1026" [label="[1]", style=solid]; -"1363 1026" -> "1366 1027" [label="[1]", style=solid]; -"1364 983" -> "1365 988" [label="[1]", style=solid]; -"1365 988" -> "1366 1027" [label="[1]", style=solid]; -"1366 1027" -> "1367 1034" [label="[1]", style=solid]; -"1367 1034" -> "1368 1073" [label="[1]", style=solid]; -"1367 1034" -> "1375 1067" [label="[1]", style=solid]; -"1368 1073" -> "1369 1075" [label="[1]", style=solid]; -"1369 1075" -> "1370 1083" [label="[1]", style=solid]; -"1370 1083" -> "1377 1085" [label="[1]", style=solid]; -"1371 1069" -> "1372 1070" [label="[1]", style=solid]; -"1372 1070" -> "1373 1082" [label="[1]", style=solid]; -"1373 1082" -> "1377 1085" [label="[1]", style=solid]; -"1374 1066" -> "1375 1067" [label="[1]", style=solid]; -"1375 1067" -> "1376 1081" [label="[1]", style=solid]; -"1376 1081" -> "1377 1085" [label="[1]", style=solid]; -"1377 1085" -> "1378 1087" [label="[1]", style=solid]; -"1378 1087" -> "1379 1094" [label="[1]", style=solid]; -"1379 1094" -> "1380 1096" [label="[1]", style=solid]; -"1380 1096" -> "1381 1114" [label="[1]", style=solid]; -"1380 1096" -> "1385 1103" [label="[1]", style=solid]; -"1381 1114" -> "1382 1119" [label="[1]", style=solid]; -"1382 1119" -> "1383 1120" [label="[1]", style=solid]; -"1383 1120" -> "1384 1122" [label="[1]", style=solid]; -"1384 1122" -> "1389 1123" [label="[1]", style=solid]; -"1385 1103" -> "1386 1108" [label="[1]", style=solid]; -"1386 1108" -> "1387 1109" [label="[1]", style=solid]; -"1387 1109" -> "1388 1121" [label="[1]", style=solid]; -"1388 1121" -> 
"1389 1123" [label="[1]", style=solid]; -"1389 1123" -> "1390 1125" [label="[1]", style=solid]; -"1390 1125" -> "1391 1126" [label="[1]", style=solid]; -"1390 1125" -> "1414 1154" [label="[1]", style=solid]; -"1391 1126" -> "1392 1130" [label="[1]", style=solid]; -"1391 1126" -> "1394 1127" [label="[1]", style=solid]; -"1391 1126" -> "1396 1133" [label="[1]", style=solid]; -"1392 1130" -> "1393 1132" [label="[1]", style=solid]; -"1393 1132" -> "1396 1133" [label="[1]", style=solid]; -"1394 1127" -> "1395 1129" [label="[1]", style=solid]; -"1395 1129" -> "1396 1133" [label="[1]", style=solid]; -"1396 1133" -> "1397 1134" [label="[1]", style=solid]; -"1397 1134" -> "1398 1136" [label="[1]", style=solid]; -"1397 1134" -> "1401 1135" [label="[1]", style=solid]; -"1398 1136" -> "1399 1142" [label="[1]", style=solid]; -"1399 1142" -> "1400 1143" [label="[1]", style=dashed]; -"1400 1143" -> "1404 1147" [label="[1]", style=dashed]; -"1401 1135" -> "1402 1138" [label="[1]", style=solid]; -"1402 1138" -> "1403 1139" [label="[1]", style=dashed]; -"1403 1139" -> "1404 1147" [label="[1]", style=dashed]; -"1404 1147" -> "1405 1149" [label="[1]", style=dashed]; -"1405 1149" -> "1406 1150" [label="[1]", style=solid]; -"1406 1150" -> "1407 1151" [label="[1]", style=dashed]; -"1407 1151" -> "1408 1152" [label="[1]", style=dashed]; -"1408 1152" -> "1410 1156" [label="[1]", style=dashed]; -"1408 1152" -> "1413 1153" [label="[1]", style=dashed]; -"1409 1098" -> "1410 1156" [label="[1]", style=solid]; -"1410 1156" -> "1411 1158" [label="[1]", style=solid]; -"1410 1156" -> "1420 1174" [label="[1]", style=solid]; -"1411 1158" -> "1412 1159" [label="[1]", style=solid]; -"1412 1159" -> "1416 1162" [label="[1]", style=solid]; -"1413 1153" -> "1414 1154" [label="[1]", style=dashed]; -"1414 1154" -> "1415 1157" [label="[1]", style=solid]; -"1414 1154" -> "1439 1172" [label="[1]", style=solid]; -"1415 1157" -> "1416 1162" [label="[1]", style=solid]; -"1416 1162" -> "1417 1164" [label="[1]", style=dashed]; -"1417 1164" -> "1418 1165" [label="[1]", style=dashed]; -"1418 1165" -> "1419 1170" [label="[1]", style=dashed]; -"1419 1170" -> "1420 1174" [label="[1]", style=dashed]; -"1419 1170" -> "1438 1171" [label="[1]", style=dashed]; -"1420 1174" -> "1421 2480" [label="[1]", style=solid]; -"1421 2480" -> "1422 2481" [label="[1]", style=solid]; -"1421 2480" -> "1428 2488" [label="[1]", style=solid]; -"1422 2481" -> "1423 2483" [label="[1]", style=dashed]; -"1423 2483" -> "1424 2484" [label="[1]", style=dashed]; -"1424 2484" -> "1425 2485" [label="[1]", style=dashed]; +"1290 548" -> "1291 549" [label="[-1]", style=solid]; +"1291 549" -> "1292 550" [label="[-1]", style=solid]; +"1292 550" -> "1293 551" [label="[1, -1]", style=dashed]; +"1293 551" -> "1294 556" [label="[-1, 1]", style=dashed]; +"1294 556" -> "1295 557" [label="[]", style=dashed]; +"1295 557" -> "1296 559" [label="[]", style=solid]; +"1296 559" -> "1297 560" [label="[]", style=solid]; +"1297 560" -> "1298 585" [label="[]", style=solid]; +"1298 585" -> "1299 589" [label="[1, -1]", style=solid]; +"1299 589" -> "1300 591" [label="[-1, -1]", style=solid]; +"1300 591" -> "1301 596" [label="[-1]", style=solid]; +"1300 591" -> "1303 594" [label="[-1]", style=solid]; +"1301 596" -> "1304 598" [label="[-1, 1]", style=solid]; +"1302 595" -> "1304 598" [label="[-1, 1]", style=solid]; +"1303 594" -> "1304 598" [label="[-1, 1]", style=solid]; +"1304 598" -> "1305 600" [label="[-1, 4]", style=solid]; +"1305 600" -> "1306 602" [label="[-1, 1, 4]", style=solid]; +"1306 602" -> 
"1307 604" [label="[-1, 3, 4]", style=solid]; +"1307 604" -> "1308 911" [label="[-1, 4]", style=solid]; +"1308 911" -> "1309 918" [label="[-1, 4]", style=solid]; +"1309 918" -> "1310 919" [label="[]", style=solid]; +"1310 919" -> "1311 923" [label="[]", style=solid]; +"1311 923" -> "1312 924" [label="[-1, 4]", style=solid]; +"1312 924" -> "1313 953" [label="[-1, 4]", style=solid]; +"1312 924" -> "1315 946" [label="[-1, 4]", style=solid]; +"1312 924" -> "1328 973" [label="[-1, 4]", style=solid]; +"1312 924" -> "1342 936" [label="[-1, 4]", style=solid]; +"1312 924" -> "1344 929" [label="[-1, 4]", style=solid]; +"1312 924" -> "1357 963" [label="[-1, 4]", style=solid]; +"1313 953" -> "1314 955" [label="[-1, 4]", style=solid]; +"1314 955" -> "1317 956" [label="[-1]", style=solid]; +"1315 946" -> "1316 948" [label="[-1, 4]", style=solid]; +"1316 948" -> "1317 956" [label="[-1]", style=solid]; +"1317 956" -> "1318 958" [label="[-1]", style=solid]; +"1318 958" -> "1319 1062" [label="[-1]", style=solid]; +"1318 958" -> "1327 977" [label="[-1]", style=solid]; +"1318 958" -> "1333 1039" [label="[-1]", style=solid]; +"1319 1062" -> "1320 1063" [label="[-1]", style=solid]; +"1320 1063" -> "1325 1064" [label="[-1, 1]", style=solid]; +"1321 1013" -> "1322 1018" [label="[-1, 4]", style=solid]; +"1322 1018" -> "1323 1020" [label="[-1, 1]", style=solid]; +"1323 1020" -> "1324 1057" [label="[-1, 1]", style=solid]; +"1324 1057" -> "1325 1064" [label="[-1, 1]", style=solid]; +"1325 1064" -> "1326 1077" [label="[-1, 1]", style=solid]; +"1325 1064" -> "1371 1069" [label="[-1, 1]", style=solid]; +"1326 1077" -> "1339 1078" [label="[-1, 1]", style=solid]; +"1327 977" -> "1330 978" [label="[-1]", style=solid]; +"1328 973" -> "1329 975" [label="[-1, 4]", style=solid]; +"1329 975" -> "1330 978" [label="[-1]", style=solid]; +"1330 978" -> "1331 1046" [label="[-1]", style=solid]; +"1331 1046" -> "1332 1047" [label="[-1]", style=solid]; +"1332 1047" -> "1338 1048" [label="[-1, 1]", style=solid]; +"1333 1039" -> "1334 1040" [label="[-1]", style=solid]; +"1334 1040" -> "1337 1041" [label="[-1, 1]", style=solid]; +"1335 993" -> "1336 998" [label="[-1, 4]", style=solid]; +"1336 998" -> "1337 1041" [label="[-1, 1]", style=solid]; +"1337 1041" -> "1338 1048" [label="[-1, 1]", style=solid]; +"1338 1048" -> "1339 1078" [label="[-1, 1]", style=solid]; +"1338 1048" -> "1372 1070" [label="[-1, 1]", style=solid]; +"1339 1078" -> "1340 1080" [label="[-1, 1]", style=solid]; +"1340 1080" -> "1341 1084" [label="[-1, 1]", style=solid]; +"1341 1084" -> "1377 1085" [label="[-1, 1, 1]", style=solid]; +"1342 936" -> "1343 938" [label="[-1, 4]", style=solid]; +"1343 938" -> "1346 939" [label="[-1]", style=solid]; +"1344 929" -> "1345 931" [label="[-1, 4]", style=solid]; +"1345 931" -> "1346 939" [label="[-1]", style=solid]; +"1346 939" -> "1347 941" [label="[-1]", style=solid]; +"1347 941" -> "1348 1054" [label="[-1]", style=solid]; +"1347 941" -> "1356 967" [label="[-1]", style=solid]; +"1347 941" -> "1362 1025" [label="[-1]", style=solid]; +"1348 1054" -> "1349 1055" [label="[-1]", style=solid]; +"1349 1055" -> "1354 1056" [label="[-1, 1]", style=solid]; +"1350 1003" -> "1351 1008" [label="[-1, 4]", style=solid]; +"1351 1008" -> "1352 1019" [label="[-1, 1]", style=solid]; +"1352 1019" -> "1353 1049" [label="[-1, 1]", style=solid]; +"1353 1049" -> "1354 1056" [label="[-1, 1]", style=solid]; +"1354 1056" -> "1355 1072" [label="[-1, 1]", style=solid]; +"1354 1056" -> "1374 1066" [label="[-1, 1]", style=solid]; +"1355 1072" -> "1368 1073" 
[label="[-1, 1]", style=solid]; +"1356 967" -> "1359 968" [label="[-1]", style=solid]; +"1357 963" -> "1358 965" [label="[-1, 4]", style=solid]; +"1358 965" -> "1359 968" [label="[-1]", style=solid]; +"1359 968" -> "1360 1032" [label="[-1]", style=solid]; +"1360 1032" -> "1361 1033" [label="[-1]", style=solid]; +"1361 1033" -> "1367 1034" [label="[-1, 1]", style=solid]; +"1362 1025" -> "1363 1026" [label="[-1]", style=solid]; +"1363 1026" -> "1366 1027" [label="[-1, 1]", style=solid]; +"1364 983" -> "1365 988" [label="[-1, 4]", style=solid]; +"1365 988" -> "1366 1027" [label="[-1, 1]", style=solid]; +"1366 1027" -> "1367 1034" [label="[-1, 1]", style=solid]; +"1367 1034" -> "1368 1073" [label="[-1, 1]", style=solid]; +"1367 1034" -> "1375 1067" [label="[-1, 1]", style=solid]; +"1368 1073" -> "1369 1075" [label="[-1, 1]", style=solid]; +"1369 1075" -> "1370 1083" [label="[-1, 1]", style=solid]; +"1370 1083" -> "1377 1085" [label="[-1, 1, 1]", style=solid]; +"1371 1069" -> "1372 1070" [label="[-1, 1]", style=solid]; +"1372 1070" -> "1373 1082" [label="[-1, 1]", style=solid]; +"1373 1082" -> "1377 1085" [label="[-1, 1, 1]", style=solid]; +"1374 1066" -> "1375 1067" [label="[-1, 1]", style=solid]; +"1375 1067" -> "1376 1081" [label="[-1, 1]", style=solid]; +"1376 1081" -> "1377 1085" [label="[-1, 1, 1]", style=solid]; +"1377 1085" -> "1378 1087" [label="[-1, 1, 4]", style=solid]; +"1378 1087" -> "1379 1094" [label="[]", style=solid]; +"1379 1094" -> "1380 1096" [label="[]", style=solid]; +"1380 1096" -> "1381 1114" [label="[]", style=solid]; +"1380 1096" -> "1385 1103" [label="[]", style=solid]; +"1381 1114" -> "1382 1119" [label="[]", style=solid]; +"1382 1119" -> "1383 1120" [label="[]", style=solid]; +"1383 1120" -> "1384 1122" [label="[]", style=solid]; +"1384 1122" -> "1389 1123" [label="[]", style=solid]; +"1385 1103" -> "1386 1108" [label="[]", style=solid]; +"1386 1108" -> "1387 1109" [label="[]", style=solid]; +"1387 1109" -> "1388 1121" [label="[]", style=solid]; +"1388 1121" -> "1389 1123" [label="[]", style=solid]; +"1389 1123" -> "1390 1125" [label="[]", style=solid]; +"1390 1125" -> "1391 1126" [label="[-1, 4]", style=solid]; +"1390 1125" -> "1414 1154" [label="[-1, 4]", style=solid]; +"1391 1126" -> "1392 1130" [label="[-1, 1]", style=solid]; +"1391 1126" -> "1394 1127" [label="[-1, 1]", style=solid]; +"1391 1126" -> "1396 1133" [label="[-1, 1]", style=solid]; +"1392 1130" -> "1393 1132" [label="[-1, 1]", style=solid]; +"1393 1132" -> "1396 1133" [label="[-1, 1]", style=solid]; +"1394 1127" -> "1395 1129" [label="[-1, 1]", style=solid]; +"1395 1129" -> "1396 1133" [label="[-1, 1]", style=solid]; +"1396 1133" -> "1397 1134" [label="[-1, 4]", style=solid]; +"1397 1134" -> "1398 1136" [label="[-1, 1]", style=solid]; +"1397 1134" -> "1401 1135" [label="[-1, 1]", style=solid]; +"1398 1136" -> "1399 1142" [label="[-1]", style=solid]; +"1399 1142" -> "1400 1143" [label="[-1]", style=dashed]; +"1400 1143" -> "1404 1147" [label="[-1]", style=dashed]; +"1401 1135" -> "1402 1138" [label="[-1]", style=solid]; +"1402 1138" -> "1403 1139" [label="[-1]", style=dashed]; +"1403 1139" -> "1404 1147" [label="[-1]", style=dashed]; +"1404 1147" -> "1405 1149" [label="[-1]", style=dashed]; +"1405 1149" -> "1406 1150" [label="[-1]", style=solid]; +"1406 1150" -> "1407 1151" [label="[1, -1]", style=dashed]; +"1407 1151" -> "1408 1152" [label="[-1, 1]", style=dashed]; +"1408 1152" -> "1410 1156" [label="[-1]", style=dashed]; +"1408 1152" -> "1413 1153" [label="[-1]", style=dashed]; +"1409 1098" -> "1410 
1156" [label="[]", style=solid]; +"1410 1156" -> "1411 1158" [label="[]", style=solid]; +"1410 1156" -> "1420 1174" [label="[]", style=solid]; +"1411 1158" -> "1412 1159" [label="[]", style=solid]; +"1412 1159" -> "1416 1162" [label="[]", style=solid]; +"1413 1153" -> "1414 1154" [label="[-1]", style=dashed]; +"1414 1154" -> "1415 1157" [label="[-1, 4]", style=solid]; +"1414 1154" -> "1439 1172" [label="[-1, 4]", style=solid]; +"1415 1157" -> "1416 1162" [label="[1, -1, 4]", style=solid]; +"1416 1162" -> "1417 1164" [label="[-1, 3]", style=dashed]; +"1417 1164" -> "1418 1165" [label="[-1, 1]", style=dashed]; +"1418 1165" -> "1419 1170" [label="[-1]", style=dashed]; +"1419 1170" -> "1420 1174" [label="[-1]", style=dashed]; +"1419 1170" -> "1438 1171" [label="[-1]", style=dashed]; +"1420 1174" -> "1421 2480" [label="[]", style=solid]; +"1421 2480" -> "1422 2481" [label="[]", style=solid]; +"1421 2480" -> "1428 2488" [label="[]", style=solid]; +"1422 2481" -> "1423 2483" [label="[-1]", style=dashed]; +"1423 2483" -> "1424 2484" [label="[-1]", style=dashed]; +"1424 2484" -> "1425 2485" [label="[-1]", style=dashed]; "1425 2485" -> "1426 2486" [label="[]", style=dashed]; "1426 2486" -> "1427 2487" [label="[]", style=dashed]; "1427 2487" -> "1428 2488" [label="[1]", style=dashed]; -"1428 2488" -> "1429 2489" [label="[1]", style=dashed]; -"1429 2489" -> "1441 2490" [label="[1]", style=dashed]; -"1430 2475" -> "1431 2476" [label="[1]", style=dashed]; -"1431 2476" -> "1440 2479" [label="[1]", style=solid]; -"1432 2149" -> "1433 2150" [label="[1]", style=dashed]; -"1433 2150" -> "1440 2479" [label="[1]", style=solid]; -"1434 1823" -> "1435 1824" [label="[1]", style=dashed]; -"1435 1824" -> "1440 2479" [label="[1]", style=solid]; -"1436 1497" -> "1437 1498" [label="[1]", style=dashed]; -"1437 1498" -> "1440 2479" [label="[1]", style=solid]; -"1438 1171" -> "1439 1172" [label="[1]", style=dashed]; -"1439 1172" -> "1440 2479" [label="[1]", style=solid]; -"1440 2479" -> "1441 2490" [label="[1]", style=solid]; -"1441 2490" -> "1442 2532" [label="[1]", style=solid]; -"1441 2490" -> "1444 2525" [label="[1]", style=solid]; -"1441 2490" -> "1448 2515" [label="[1]", style=solid]; -"1441 2490" -> "1450 2508" [label="[1]", style=solid]; -"1441 2490" -> "1471 2495" [label="[1]", style=solid]; -"1441 2490" -> "1475 2503" [label="[1]", style=solid]; -"1441 2490" -> "1663 2775" [label="[1]", style=solid]; -"1442 2532" -> "1443 2534" [label="[1]", style=solid]; -"1443 2534" -> "1446 2535" [label="[1]", style=solid]; -"1444 2525" -> "1445 2527" [label="[1]", style=solid]; -"1445 2527" -> "1446 2535" [label="[1]", style=solid]; -"1446 2535" -> "1447 2537" [label="[1]", style=solid]; -"1447 2537" -> "1454 2538" [label="[1]", style=solid]; -"1448 2515" -> "1449 2517" [label="[1]", style=solid]; -"1449 2517" -> "1452 2518" [label="[1]", style=solid]; -"1450 2508" -> "1451 2510" [label="[1]", style=solid]; -"1451 2510" -> "1452 2518" [label="[1]", style=solid]; -"1452 2518" -> "1453 2520" [label="[1]", style=solid]; -"1453 2520" -> "1454 2538" [label="[1]", style=solid]; -"1454 2538" -> "1455 2539" [label="[1]", style=solid]; -"1455 2539" -> "1456 2542" [label="[1]", style=solid]; -"1456 2542" -> "1457 2543" [label="[1]", style=solid]; -"1457 2543" -> "1458 2544" [label="[1]", style=solid]; -"1458 2544" -> "1459 2546" [label="[1]", style=solid]; -"1459 2546" -> "1460 2548" [label="[1]", style=solid]; -"1460 2548" -> "1461 2549" [label="[1]", style=solid]; -"1461 2549" -> "1462 2550" [label="[1]", style=solid]; -"1462 2550" 
-> "1463 2551" [label="[1]", style=solid]; -"1463 2551" -> "1464 2553" [label="[1]", style=dashed]; -"1464 2553" -> "1465 2555" [label="[1]", style=dashed]; -"1464 2553" -> "1492 2641" [label="[1]", style=dashed]; -"1464 2553" -> "1512 2623" [label="[1]", style=dashed]; -"1464 2553" -> "1518 2572" [label="[1]", style=dashed]; -"1464 2553" -> "1540 2669" [label="[1]", style=dashed]; -"1464 2553" -> "1552 2589" [label="[1]", style=dashed]; -"1464 2553" -> "1574 2697" [label="[1]", style=dashed]; -"1464 2553" -> "1586 2606" [label="[1]", style=dashed]; -"1464 2553" -> "1608 2725" [label="[1]", style=dashed]; -"1465 2555" -> "1466 2557" [label="[1]", style=dashed]; -"1466 2557" -> "1467 2558" [label="[1]", style=solid]; -"1467 2558" -> "1468 2559" [label="[1]", style=dashed]; -"1468 2559" -> "1469 2560" [label="[1]", style=dashed]; -"1469 2560" -> "1470 2561" [label="[1]", style=dashed]; -"1470 2561" -> "1476 2562" [label="[1]", style=dashed]; -"1471 2495" -> "1472 2500" [label="[1]", style=solid]; -"1472 2500" -> "1473 2501" [label="[1]", style=solid]; -"1473 2501" -> "1474 2502" [label="[1]", style=dashed]; -"1474 2502" -> "1475 2503" [label="[1]", style=solid]; -"1475 2503" -> "1476 2562" [label="[1]", style=solid]; -"1475 2503" -> "1524 2579" [label="[1]", style=solid]; -"1475 2503" -> "1558 2596" [label="[1]", style=solid]; -"1475 2503" -> "1592 2613" [label="[1]", style=solid]; -"1476 2562" -> "1477 2568" [label="[1]", style=solid]; -"1476 2562" -> "1478 2564" [label="[1]", style=solid]; -"1477 2568" -> "1481 2569" [label="[1]", style=solid]; -"1478 2564" -> "1479 2565" [label="[1]", style=solid]; -"1479 2565" -> "1480 2566" [label="[1]", style=solid]; -"1480 2566" -> "1481 2569" [label="[1]", style=dashed]; -"1481 2569" -> "1482 2570" [label="[1]", style=solid]; -"1482 2570" -> "1483 2658" [label="[1]", style=solid]; -"1482 2570" -> "1486 2655" [label="[1]", style=solid]; -"1482 2570" -> "1489 2652" [label="[1]", style=solid]; -"1482 2570" -> "1503 2632" [label="[1]", style=solid]; -"1482 2570" -> "1506 2629" [label="[1]", style=solid]; -"1482 2570" -> "1509 2626" [label="[1]", style=solid]; -"1482 2570" -> "1517 2667" [label="[1]", style=solid]; +"1428 2488" -> "1429 2489" [label="[]", style=dashed]; +"1429 2489" -> "1441 2490" [label="[]", style=dashed]; +"1430 2475" -> "1431 2476" [label="[-1]", style=dashed]; +"1431 2476" -> "1440 2479" [label="[-1, 4]", style=solid]; +"1432 2149" -> "1433 2150" [label="[-1]", style=dashed]; +"1433 2150" -> "1440 2479" [label="[-1, 4]", style=solid]; +"1434 1823" -> "1435 1824" [label="[-1]", style=dashed]; +"1435 1824" -> "1440 2479" [label="[-1, 4]", style=solid]; +"1436 1497" -> "1437 1498" [label="[-1]", style=dashed]; +"1437 1498" -> "1440 2479" [label="[-1, 4]", style=solid]; +"1438 1171" -> "1439 1172" [label="[-1]", style=dashed]; +"1439 1172" -> "1440 2479" [label="[-1, 4]", style=solid]; +"1440 2479" -> "1441 2490" [label="[-1, 4]", style=solid]; +"1441 2490" -> "1442 2532" [label="[]", style=solid]; +"1441 2490" -> "1444 2525" [label="[]", style=solid]; +"1441 2490" -> "1448 2515" [label="[]", style=solid]; +"1441 2490" -> "1450 2508" [label="[]", style=solid]; +"1441 2490" -> "1471 2495" [label="[]", style=solid]; +"1441 2490" -> "1475 2503" [label="[]", style=solid]; +"1441 2490" -> "1663 2775" [label="[]", style=solid]; +"1442 2532" -> "1443 2534" [label="[]", style=solid]; +"1443 2534" -> "1446 2535" [label="[]", style=solid]; +"1444 2525" -> "1445 2527" [label="[]", style=solid]; +"1445 2527" -> "1446 2535" [label="[]", style=solid]; 
+"1446 2535" -> "1447 2537" [label="[]", style=solid]; +"1447 2537" -> "1454 2538" [label="[]", style=solid]; +"1448 2515" -> "1449 2517" [label="[]", style=solid]; +"1449 2517" -> "1452 2518" [label="[]", style=solid]; +"1450 2508" -> "1451 2510" [label="[]", style=solid]; +"1451 2510" -> "1452 2518" [label="[]", style=solid]; +"1452 2518" -> "1453 2520" [label="[]", style=solid]; +"1453 2520" -> "1454 2538" [label="[]", style=solid]; +"1454 2538" -> "1455 2539" [label="[]", style=solid]; +"1455 2539" -> "1456 2542" [label="[]", style=solid]; +"1456 2542" -> "1457 2543" [label="[]", style=solid]; +"1457 2543" -> "1458 2544" [label="[]", style=solid]; +"1458 2544" -> "1459 2546" [label="[]", style=solid]; +"1459 2546" -> "1460 2548" [label="[]", style=solid]; +"1460 2548" -> "1461 2549" [label="[]", style=solid]; +"1461 2549" -> "1462 2550" [label="[]", style=solid]; +"1462 2550" -> "1463 2551" [label="[]", style=solid]; +"1463 2551" -> "1464 2553" [label="[]", style=dashed]; +"1464 2553" -> "1465 2555" [label="[]", style=dashed]; +"1464 2553" -> "1492 2641" [label="[]", style=dashed]; +"1464 2553" -> "1512 2623" [label="[]", style=dashed]; +"1464 2553" -> "1518 2572" [label="[]", style=dashed]; +"1464 2553" -> "1540 2669" [label="[]", style=dashed]; +"1464 2553" -> "1552 2589" [label="[]", style=dashed]; +"1464 2553" -> "1574 2697" [label="[]", style=dashed]; +"1464 2553" -> "1586 2606" [label="[]", style=dashed]; +"1464 2553" -> "1608 2725" [label="[]", style=dashed]; +"1465 2555" -> "1466 2557" [label="[]", style=dashed]; +"1466 2557" -> "1467 2558" [label="[]", style=solid]; +"1467 2558" -> "1468 2559" [label="[-1, -1]", style=dashed]; +"1468 2559" -> "1469 2560" [label="[-1, -1]", style=dashed]; +"1469 2560" -> "1470 2561" [label="[-1]", style=dashed]; +"1470 2561" -> "1476 2562" [label="[-1]", style=dashed]; +"1471 2495" -> "1472 2500" [label="[]", style=solid]; +"1472 2500" -> "1473 2501" [label="[]", style=solid]; +"1473 2501" -> "1474 2502" [label="[-1]", style=dashed]; +"1474 2502" -> "1475 2503" [label="[]", style=solid]; +"1475 2503" -> "1476 2562" [label="[]", style=solid]; +"1475 2503" -> "1524 2579" [label="[]", style=solid]; +"1475 2503" -> "1558 2596" [label="[]", style=solid]; +"1475 2503" -> "1592 2613" [label="[]", style=solid]; +"1476 2562" -> "1477 2568" [label="[]", style=solid]; +"1476 2562" -> "1478 2564" [label="[]", style=solid]; +"1477 2568" -> "1481 2569" [label="[]", style=solid]; +"1478 2564" -> "1479 2565" [label="[]", style=solid]; +"1479 2565" -> "1480 2566" [label="[]", style=solid]; +"1480 2566" -> "1481 2569" [label="[]", style=dashed]; +"1481 2569" -> "1482 2570" [label="[-1, 256, 7, 7]", style=solid]; +"1482 2570" -> "1483 2658" [label="[-1, 256, 7, 7]", style=solid]; +"1482 2570" -> "1486 2655" [label="[-1, 256, 7, 7]", style=solid]; +"1482 2570" -> "1489 2652" [label="[-1, 256, 7, 7]", style=solid]; +"1482 2570" -> "1503 2632" [label="[-1, 256, 7, 7]", style=solid]; +"1482 2570" -> "1506 2629" [label="[-1, 256, 7, 7]", style=solid]; +"1482 2570" -> "1509 2626" [label="[-1, 256, 7, 7]", style=solid]; +"1482 2570" -> "1517 2667" [label="[-1, 256, 7, 7]", style=solid]; "1483 2658" -> "1484 2659" [label="[4]", style=dashed]; "1484 2659" -> "1485 2663" [label="[]", style=dashed]; "1485 2663" -> "1500 2664" [label="[1]", style=dashed]; @@ -5702,18 +5702,18 @@ strict digraph { "1489 2652" -> "1490 2653" [label="[4]", style=dashed]; "1490 2653" -> "1491 2661" [label="[]", style=dashed]; "1491 2661" -> "1500 2664" [label="[1]", style=dashed]; -"1492 2641" -> 
"1493 2643" [label="[1]", style=dashed]; -"1493 2643" -> "1494 2644" [label="[1]", style=solid]; -"1494 2644" -> "1495 2645" [label="[1]", style=dashed]; -"1495 2645" -> "1496 2647" [label="[1]", style=dashed]; -"1496 2647" -> "1497 2649" [label="[1]", style=dashed]; -"1496 2647" -> "1501 2665" [label="[1]", style=dashed]; +"1492 2641" -> "1493 2643" [label="[]", style=dashed]; +"1493 2643" -> "1494 2644" [label="[]", style=solid]; +"1494 2644" -> "1495 2645" [label="[-1, -1]", style=dashed]; +"1495 2645" -> "1496 2647" [label="[-1, -1]", style=dashed]; +"1496 2647" -> "1497 2649" [label="[-1, 1, 1, 1]", style=dashed]; +"1496 2647" -> "1501 2665" [label="[-1, 1, 1, 1]", style=dashed]; "1497 2649" -> "1498 2650" [label="[4]", style=dashed]; "1498 2650" -> "1499 2660" [label="[]", style=dashed]; "1499 2660" -> "1500 2664" [label="[1]", style=dashed]; "1500 2664" -> "1501 2665" [label="[4]", style=dashed]; -"1501 2665" -> "1502 2666" [label="[1]", style=dashed]; -"1502 2666" -> "1517 2667" [label="[1]", style=dashed]; +"1501 2665" -> "1502 2666" [label="[-1, -1, -1, -1]", style=dashed]; +"1502 2666" -> "1517 2667" [label="[-1, -1, -1, -1]", style=dashed]; "1503 2632" -> "1504 2633" [label="[4]", style=dashed]; "1504 2633" -> "1505 2637" [label="[]", style=dashed]; "1505 2637" -> "1515 2638" [label="[1]", style=dashed]; @@ -5723,29 +5723,29 @@ strict digraph { "1509 2626" -> "1510 2627" [label="[4]", style=dashed]; "1510 2627" -> "1511 2635" [label="[]", style=dashed]; "1511 2635" -> "1515 2638" [label="[1]", style=dashed]; -"1512 2623" -> "1513 2624" [label="[1]", style=dashed]; +"1512 2623" -> "1513 2624" [label="[-1]", style=dashed]; "1513 2624" -> "1514 2634" [label="[]", style=dashed]; "1514 2634" -> "1515 2638" [label="[1]", style=dashed]; "1515 2638" -> "1516 2639" [label="[4]", style=dashed]; -"1516 2639" -> "1517 2667" [label="[1]", style=solid]; -"1517 2667" -> "1551 2695" [label="[1]", style=solid]; -"1518 2572" -> "1519 2574" [label="[1]", style=dashed]; -"1519 2574" -> "1520 2575" [label="[1]", style=solid]; -"1520 2575" -> "1521 2576" [label="[1]", style=dashed]; -"1521 2576" -> "1522 2577" [label="[1]", style=dashed]; -"1522 2577" -> "1523 2578" [label="[1]", style=dashed]; -"1523 2578" -> "1524 2579" [label="[1]", style=dashed]; -"1524 2579" -> "1525 2585" [label="[1]", style=solid]; -"1524 2579" -> "1526 2581" [label="[1]", style=solid]; -"1525 2585" -> "1529 2586" [label="[1]", style=solid]; -"1526 2581" -> "1527 2582" [label="[1]", style=solid]; -"1527 2582" -> "1528 2583" [label="[1]", style=solid]; -"1528 2583" -> "1529 2586" [label="[1]", style=dashed]; -"1529 2586" -> "1530 2587" [label="[1]", style=solid]; -"1530 2587" -> "1531 2686" [label="[1]", style=solid]; -"1530 2587" -> "1534 2683" [label="[1]", style=solid]; -"1530 2587" -> "1537 2680" [label="[1]", style=solid]; -"1530 2587" -> "1551 2695" [label="[1]", style=solid]; +"1516 2639" -> "1517 2667" [label="[-1, -1, -1, -1]", style=solid]; +"1517 2667" -> "1551 2695" [label="[-1, -1, -1, -1]", style=solid]; +"1518 2572" -> "1519 2574" [label="[]", style=dashed]; +"1519 2574" -> "1520 2575" [label="[]", style=solid]; +"1520 2575" -> "1521 2576" [label="[-1, -1]", style=dashed]; +"1521 2576" -> "1522 2577" [label="[-1, -1]", style=dashed]; +"1522 2577" -> "1523 2578" [label="[-1]", style=dashed]; +"1523 2578" -> "1524 2579" [label="[-1]", style=dashed]; +"1524 2579" -> "1525 2585" [label="[]", style=solid]; +"1524 2579" -> "1526 2581" [label="[]", style=solid]; +"1525 2585" -> "1529 2586" [label="[]", style=solid]; 
+"1526 2581" -> "1527 2582" [label="[]", style=solid]; +"1527 2582" -> "1528 2583" [label="[]", style=solid]; +"1528 2583" -> "1529 2586" [label="[]", style=dashed]; +"1529 2586" -> "1530 2587" [label="[-1, 256, 7, 7]", style=solid]; +"1530 2587" -> "1531 2686" [label="[-1, 256, 7, 7]", style=solid]; +"1530 2587" -> "1534 2683" [label="[-1, 256, 7, 7]", style=solid]; +"1530 2587" -> "1537 2680" [label="[-1, 256, 7, 7]", style=solid]; +"1530 2587" -> "1551 2695" [label="[-1, 256, 7, 7]", style=solid]; "1531 2686" -> "1532 2687" [label="[4]", style=dashed]; "1532 2687" -> "1533 2691" [label="[]", style=dashed]; "1533 2691" -> "1548 2692" [label="[1]", style=dashed]; @@ -5755,36 +5755,36 @@ strict digraph { "1537 2680" -> "1538 2681" [label="[4]", style=dashed]; "1538 2681" -> "1539 2689" [label="[]", style=dashed]; "1539 2689" -> "1548 2692" [label="[1]", style=dashed]; -"1540 2669" -> "1541 2671" [label="[1]", style=dashed]; -"1541 2671" -> "1542 2672" [label="[1]", style=solid]; -"1542 2672" -> "1543 2673" [label="[1]", style=dashed]; -"1543 2673" -> "1544 2675" [label="[1]", style=dashed]; -"1544 2675" -> "1545 2677" [label="[1]", style=dashed]; -"1544 2675" -> "1549 2693" [label="[1]", style=dashed]; +"1540 2669" -> "1541 2671" [label="[]", style=dashed]; +"1541 2671" -> "1542 2672" [label="[]", style=solid]; +"1542 2672" -> "1543 2673" [label="[-1, -1]", style=dashed]; +"1543 2673" -> "1544 2675" [label="[-1, -1]", style=dashed]; +"1544 2675" -> "1545 2677" [label="[-1, 1, 1, 1]", style=dashed]; +"1544 2675" -> "1549 2693" [label="[-1, 1, 1, 1]", style=dashed]; "1545 2677" -> "1546 2678" [label="[4]", style=dashed]; "1546 2678" -> "1547 2688" [label="[]", style=dashed]; "1547 2688" -> "1548 2692" [label="[1]", style=dashed]; "1548 2692" -> "1549 2693" [label="[4]", style=dashed]; -"1549 2693" -> "1550 2694" [label="[1]", style=dashed]; -"1550 2694" -> "1551 2695" [label="[1]", style=dashed]; -"1551 2695" -> "1585 2723" [label="[1]", style=solid]; -"1552 2589" -> "1553 2591" [label="[1]", style=dashed]; -"1553 2591" -> "1554 2592" [label="[1]", style=solid]; -"1554 2592" -> "1555 2593" [label="[1]", style=dashed]; -"1555 2593" -> "1556 2594" [label="[1]", style=dashed]; -"1556 2594" -> "1557 2595" [label="[1]", style=dashed]; -"1557 2595" -> "1558 2596" [label="[1]", style=dashed]; -"1558 2596" -> "1559 2602" [label="[1]", style=solid]; -"1558 2596" -> "1560 2598" [label="[1]", style=solid]; -"1559 2602" -> "1563 2603" [label="[1]", style=solid]; -"1560 2598" -> "1561 2599" [label="[1]", style=solid]; -"1561 2599" -> "1562 2600" [label="[1]", style=solid]; -"1562 2600" -> "1563 2603" [label="[1]", style=dashed]; -"1563 2603" -> "1564 2604" [label="[1]", style=solid]; -"1564 2604" -> "1565 2714" [label="[1]", style=solid]; -"1564 2604" -> "1568 2711" [label="[1]", style=solid]; -"1564 2604" -> "1571 2708" [label="[1]", style=solid]; -"1564 2604" -> "1585 2723" [label="[1]", style=solid]; +"1549 2693" -> "1550 2694" [label="[-1, -1, -1, -1]", style=dashed]; +"1550 2694" -> "1551 2695" [label="[-1, -1, -1, -1]", style=dashed]; +"1551 2695" -> "1585 2723" [label="[-1, -1, -1, -1]", style=solid]; +"1552 2589" -> "1553 2591" [label="[]", style=dashed]; +"1553 2591" -> "1554 2592" [label="[]", style=solid]; +"1554 2592" -> "1555 2593" [label="[-1, -1]", style=dashed]; +"1555 2593" -> "1556 2594" [label="[-1, -1]", style=dashed]; +"1556 2594" -> "1557 2595" [label="[-1]", style=dashed]; +"1557 2595" -> "1558 2596" [label="[-1]", style=dashed]; +"1558 2596" -> "1559 2602" [label="[]", 
style=solid]; +"1558 2596" -> "1560 2598" [label="[]", style=solid]; +"1559 2602" -> "1563 2603" [label="[]", style=solid]; +"1560 2598" -> "1561 2599" [label="[]", style=solid]; +"1561 2599" -> "1562 2600" [label="[]", style=solid]; +"1562 2600" -> "1563 2603" [label="[]", style=dashed]; +"1563 2603" -> "1564 2604" [label="[-1, 256, 7, 7]", style=solid]; +"1564 2604" -> "1565 2714" [label="[-1, 256, 7, 7]", style=solid]; +"1564 2604" -> "1568 2711" [label="[-1, 256, 7, 7]", style=solid]; +"1564 2604" -> "1571 2708" [label="[-1, 256, 7, 7]", style=solid]; +"1564 2604" -> "1585 2723" [label="[-1, 256, 7, 7]", style=solid]; "1565 2714" -> "1566 2715" [label="[4]", style=dashed]; "1566 2715" -> "1567 2719" [label="[]", style=dashed]; "1567 2719" -> "1582 2720" [label="[1]", style=dashed]; @@ -5794,36 +5794,36 @@ strict digraph { "1571 2708" -> "1572 2709" [label="[4]", style=dashed]; "1572 2709" -> "1573 2717" [label="[]", style=dashed]; "1573 2717" -> "1582 2720" [label="[1]", style=dashed]; -"1574 2697" -> "1575 2699" [label="[1]", style=dashed]; -"1575 2699" -> "1576 2700" [label="[1]", style=solid]; -"1576 2700" -> "1577 2701" [label="[1]", style=dashed]; -"1577 2701" -> "1578 2703" [label="[1]", style=dashed]; -"1578 2703" -> "1579 2705" [label="[1]", style=dashed]; -"1578 2703" -> "1583 2721" [label="[1]", style=dashed]; +"1574 2697" -> "1575 2699" [label="[]", style=dashed]; +"1575 2699" -> "1576 2700" [label="[]", style=solid]; +"1576 2700" -> "1577 2701" [label="[-1, -1]", style=dashed]; +"1577 2701" -> "1578 2703" [label="[-1, -1]", style=dashed]; +"1578 2703" -> "1579 2705" [label="[-1, 1, 1, 1]", style=dashed]; +"1578 2703" -> "1583 2721" [label="[-1, 1, 1, 1]", style=dashed]; "1579 2705" -> "1580 2706" [label="[4]", style=dashed]; "1580 2706" -> "1581 2716" [label="[]", style=dashed]; "1581 2716" -> "1582 2720" [label="[1]", style=dashed]; "1582 2720" -> "1583 2721" [label="[4]", style=dashed]; -"1583 2721" -> "1584 2722" [label="[1]", style=dashed]; -"1584 2722" -> "1585 2723" [label="[1]", style=dashed]; -"1585 2723" -> "1619 2751" [label="[1]", style=solid]; -"1586 2606" -> "1587 2608" [label="[1]", style=dashed]; -"1587 2608" -> "1588 2609" [label="[1]", style=solid]; -"1588 2609" -> "1589 2610" [label="[1]", style=dashed]; -"1589 2610" -> "1590 2611" [label="[1]", style=dashed]; -"1590 2611" -> "1591 2612" [label="[1]", style=dashed]; -"1591 2612" -> "1592 2613" [label="[1]", style=dashed]; -"1592 2613" -> "1593 2619" [label="[1]", style=solid]; -"1592 2613" -> "1594 2615" [label="[1]", style=solid]; -"1593 2619" -> "1597 2620" [label="[1]", style=solid]; -"1594 2615" -> "1595 2616" [label="[1]", style=solid]; -"1595 2616" -> "1596 2617" [label="[1]", style=solid]; -"1596 2617" -> "1597 2620" [label="[1]", style=dashed]; -"1597 2620" -> "1598 2621" [label="[1]", style=solid]; -"1598 2621" -> "1599 2742" [label="[1]", style=solid]; -"1598 2621" -> "1602 2739" [label="[1]", style=solid]; -"1598 2621" -> "1605 2736" [label="[1]", style=solid]; -"1598 2621" -> "1619 2751" [label="[1]", style=solid]; +"1583 2721" -> "1584 2722" [label="[-1, -1, -1, -1]", style=dashed]; +"1584 2722" -> "1585 2723" [label="[-1, -1, -1, -1]", style=dashed]; +"1585 2723" -> "1619 2751" [label="[-1, -1, -1, -1]", style=solid]; +"1586 2606" -> "1587 2608" [label="[]", style=dashed]; +"1587 2608" -> "1588 2609" [label="[]", style=solid]; +"1588 2609" -> "1589 2610" [label="[-1, -1]", style=dashed]; +"1589 2610" -> "1590 2611" [label="[-1, -1]", style=dashed]; +"1590 2611" -> "1591 2612" [label="[-1]", 
style=dashed]; +"1591 2612" -> "1592 2613" [label="[-1]", style=dashed]; +"1592 2613" -> "1593 2619" [label="[]", style=solid]; +"1592 2613" -> "1594 2615" [label="[]", style=solid]; +"1593 2619" -> "1597 2620" [label="[]", style=solid]; +"1594 2615" -> "1595 2616" [label="[]", style=solid]; +"1595 2616" -> "1596 2617" [label="[]", style=solid]; +"1596 2617" -> "1597 2620" [label="[]", style=dashed]; +"1597 2620" -> "1598 2621" [label="[-1, 256, 7, 7]", style=solid]; +"1598 2621" -> "1599 2742" [label="[-1, 256, 7, 7]", style=solid]; +"1598 2621" -> "1602 2739" [label="[-1, 256, 7, 7]", style=solid]; +"1598 2621" -> "1605 2736" [label="[-1, 256, 7, 7]", style=solid]; +"1598 2621" -> "1619 2751" [label="[-1, 256, 7, 7]", style=solid]; "1599 2742" -> "1600 2743" [label="[4]", style=dashed]; "1600 2743" -> "1601 2747" [label="[]", style=dashed]; "1601 2747" -> "1616 2748" [label="[1]", style=dashed]; @@ -5833,2475 +5833,2475 @@ strict digraph { "1605 2736" -> "1606 2737" [label="[4]", style=dashed]; "1606 2737" -> "1607 2745" [label="[]", style=dashed]; "1607 2745" -> "1616 2748" [label="[1]", style=dashed]; -"1608 2725" -> "1609 2727" [label="[1]", style=dashed]; -"1609 2727" -> "1610 2728" [label="[1]", style=solid]; -"1610 2728" -> "1611 2729" [label="[1]", style=dashed]; -"1611 2729" -> "1612 2731" [label="[1]", style=dashed]; -"1612 2731" -> "1613 2733" [label="[1]", style=dashed]; -"1612 2731" -> "1617 2749" [label="[1]", style=dashed]; +"1608 2725" -> "1609 2727" [label="[]", style=dashed]; +"1609 2727" -> "1610 2728" [label="[]", style=solid]; +"1610 2728" -> "1611 2729" [label="[-1, -1]", style=dashed]; +"1611 2729" -> "1612 2731" [label="[-1, -1]", style=dashed]; +"1612 2731" -> "1613 2733" [label="[-1, 1, 1, 1]", style=dashed]; +"1612 2731" -> "1617 2749" [label="[-1, 1, 1, 1]", style=dashed]; "1613 2733" -> "1614 2734" [label="[4]", style=dashed]; "1614 2734" -> "1615 2744" [label="[]", style=dashed]; "1615 2744" -> "1616 2748" [label="[1]", style=dashed]; "1616 2748" -> "1617 2749" [label="[4]", style=dashed]; -"1617 2749" -> "1618 2750" [label="[1]", style=dashed]; -"1618 2750" -> "1619 2751" [label="[1]", style=dashed]; -"1619 2751" -> "1621 2753" [label="[1]", style=solid]; -"1619 2751" -> "1625 2759" [label="[1]", style=solid]; +"1617 2749" -> "1618 2750" [label="[-1, -1, -1, -1]", style=dashed]; +"1618 2750" -> "1619 2751" [label="[-1, -1, -1, -1]", style=dashed]; +"1619 2751" -> "1621 2753" [label="[-1, -1, -1, -1]", style=solid]; +"1619 2751" -> "1625 2759" [label="[-1, -1, -1, -1]", style=solid]; "1620 2757" -> "1624 2758" [label="[1]", style=dashed]; "1621 2753" -> "1622 2754" [label="[4]", style=dashed]; "1622 2754" -> "1623 2756" [label="[]", style=dashed]; "1623 2756" -> "1624 2758" [label="[1]", style=dashed]; "1624 2758" -> "1625 2759" [label="[2]", style=dashed]; -"1625 2759" -> "1626 2762_MatMul" [label="[1]", style=solid]; -"1626 2762_MatMul" -> "1627 2762_Add" [label="[1]", style=solid]; -"1627 2762_Add" -> "1628 2763" [label="[1]", style=solid]; -"1628 2763" -> "1629 2766_MatMul" [label="[1]", style=solid]; -"1629 2766_MatMul" -> "1630 2766_Add" [label="[1]", style=solid]; -"1630 2766_Add" -> "1631 2767" [label="[1]", style=solid]; -"1631 2767" -> "1632 2770_MatMul" [label="[1]", style=solid]; -"1631 2767" -> "1659 2773_MatMul" [label="[1]", style=solid]; -"1632 2770_MatMul" -> "1633 2770_Add" [label="[1]", style=solid]; -"1633 2770_Add" -> "1634 2774" [label="[1]", style=solid]; -"1634 2774" -> "1635 2950" [label="[1]", style=solid]; -"1634 2774" -> "1640 
2955" [label="[1]", style=solid]; -"1635 2950" -> "1636 2951" [label="[1]", style=dashed]; +"1625 2759" -> "1626 2762_MatMul" [label="[]", style=solid]; +"1626 2762_MatMul" -> "1627 2762_Add" [label="[]", style=solid]; +"1627 2762_Add" -> "1628 2763" [label="[]", style=solid]; +"1628 2763" -> "1629 2766_MatMul" [label="[]", style=solid]; +"1629 2766_MatMul" -> "1630 2766_Add" [label="[]", style=solid]; +"1630 2766_Add" -> "1631 2767" [label="[]", style=solid]; +"1631 2767" -> "1632 2770_MatMul" [label="[]", style=solid]; +"1631 2767" -> "1659 2773_MatMul" [label="[]", style=solid]; +"1632 2770_MatMul" -> "1633 2770_Add" [label="[]", style=solid]; +"1633 2770_Add" -> "1634 2774" [label="[]", style=solid]; +"1634 2774" -> "1635 2950" [label="[]", style=solid]; +"1634 2774" -> "1640 2955" [label="[]", style=solid]; +"1635 2950" -> "1636 2951" [label="[-1]", style=dashed]; "1636 2951" -> "1637 2992" [label="[]", style=dashed]; "1636 2951" -> "1655 2984" [label="[]", style=dashed]; "1637 2992" -> "1639 2993" [label="[1]", style=dashed]; "1638 2991" -> "1639 2993" [label="[1]", style=dashed]; "1639 2993" -> "1641 2994" [label="[2]", style=dashed]; -"1640 2955" -> "1641 2994" [label="[1]", style=solid]; -"1641 2994" -> "1642 2996" [label="[1]", style=solid]; -"1641 2994" -> "1651 6486" [label="[1]", style=solid]; -"1641 2994" -> "1761 6442" [label="[1]", style=solid]; -"1641 2994" -> "1779 6398" [label="[1]", style=solid]; -"1641 2994" -> "1797 6354" [label="[1]", style=solid]; -"1641 2994" -> "1815 6310" [label="[1]", style=solid]; -"1641 2994" -> "1833 6266" [label="[1]", style=solid]; -"1641 2994" -> "1851 6222" [label="[1]", style=solid]; -"1641 2994" -> "1869 6178" [label="[1]", style=solid]; -"1641 2994" -> "1887 6134" [label="[1]", style=solid]; -"1641 2994" -> "1905 6090" [label="[1]", style=solid]; -"1641 2994" -> "1923 6046" [label="[1]", style=solid]; -"1641 2994" -> "1941 6002" [label="[1]", style=solid]; -"1641 2994" -> "1959 5958" [label="[1]", style=solid]; -"1641 2994" -> "1977 5914" [label="[1]", style=solid]; -"1641 2994" -> "1995 5870" [label="[1]", style=solid]; -"1641 2994" -> "2013 5826" [label="[1]", style=solid]; -"1641 2994" -> "2031 5782" [label="[1]", style=solid]; -"1641 2994" -> "2049 5738" [label="[1]", style=solid]; -"1641 2994" -> "2067 5694" [label="[1]", style=solid]; -"1641 2994" -> "2085 5650" [label="[1]", style=solid]; -"1641 2994" -> "2103 5606" [label="[1]", style=solid]; -"1641 2994" -> "2121 5562" [label="[1]", style=solid]; -"1641 2994" -> "2139 5518" [label="[1]", style=solid]; -"1641 2994" -> "2157 5474" [label="[1]", style=solid]; -"1641 2994" -> "2175 5430" [label="[1]", style=solid]; -"1641 2994" -> "2193 5386" [label="[1]", style=solid]; -"1641 2994" -> "2211 5342" [label="[1]", style=solid]; -"1641 2994" -> "2229 5298" [label="[1]", style=solid]; -"1641 2994" -> "2247 5254" [label="[1]", style=solid]; -"1641 2994" -> "2265 5210" [label="[1]", style=solid]; -"1641 2994" -> "2283 5166" [label="[1]", style=solid]; -"1641 2994" -> "2301 5122" [label="[1]", style=solid]; -"1641 2994" -> "2319 5078" [label="[1]", style=solid]; -"1641 2994" -> "2337 5034" [label="[1]", style=solid]; -"1641 2994" -> "2355 4990" [label="[1]", style=solid]; -"1641 2994" -> "2373 4946" [label="[1]", style=solid]; -"1641 2994" -> "2391 4902" [label="[1]", style=solid]; -"1641 2994" -> "2409 4858" [label="[1]", style=solid]; -"1641 2994" -> "2427 4814" [label="[1]", style=solid]; -"1641 2994" -> "2445 4770" [label="[1]", style=solid]; -"1641 2994" -> "2463 4726" [label="[1]", 
style=solid]; -"1641 2994" -> "2481 4682" [label="[1]", style=solid]; -"1641 2994" -> "2499 4638" [label="[1]", style=solid]; -"1641 2994" -> "2517 4594" [label="[1]", style=solid]; -"1641 2994" -> "2535 4550" [label="[1]", style=solid]; -"1641 2994" -> "2553 4506" [label="[1]", style=solid]; -"1641 2994" -> "2571 4462" [label="[1]", style=solid]; -"1641 2994" -> "2589 4418" [label="[1]", style=solid]; -"1641 2994" -> "2607 4374" [label="[1]", style=solid]; -"1641 2994" -> "2625 4330" [label="[1]", style=solid]; -"1641 2994" -> "2643 4286" [label="[1]", style=solid]; -"1641 2994" -> "2661 4242" [label="[1]", style=solid]; -"1641 2994" -> "2679 4198" [label="[1]", style=solid]; -"1641 2994" -> "2697 4154" [label="[1]", style=solid]; -"1641 2994" -> "2715 4110" [label="[1]", style=solid]; -"1641 2994" -> "2733 4066" [label="[1]", style=solid]; -"1641 2994" -> "2751 4022" [label="[1]", style=solid]; -"1641 2994" -> "2769 3978" [label="[1]", style=solid]; -"1641 2994" -> "2787 3934" [label="[1]", style=solid]; -"1641 2994" -> "2805 3890" [label="[1]", style=solid]; -"1641 2994" -> "2823 3846" [label="[1]", style=solid]; -"1641 2994" -> "2841 3802" [label="[1]", style=solid]; -"1641 2994" -> "2859 3758" [label="[1]", style=solid]; -"1641 2994" -> "2877 3714" [label="[1]", style=solid]; -"1641 2994" -> "2895 3670" [label="[1]", style=solid]; -"1641 2994" -> "2913 3626" [label="[1]", style=solid]; -"1641 2994" -> "2931 3582" [label="[1]", style=solid]; -"1641 2994" -> "2949 3538" [label="[1]", style=solid]; -"1641 2994" -> "2967 3494" [label="[1]", style=solid]; -"1641 2994" -> "2985 3450" [label="[1]", style=solid]; -"1641 2994" -> "3003 3406" [label="[1]", style=solid]; -"1641 2994" -> "3021 3362" [label="[1]", style=solid]; -"1641 2994" -> "3039 3318" [label="[1]", style=solid]; -"1641 2994" -> "3057 3274" [label="[1]", style=solid]; -"1641 2994" -> "3075 3230" [label="[1]", style=solid]; -"1641 2994" -> "3093 3186" [label="[1]", style=solid]; -"1641 2994" -> "3111 3142" [label="[1]", style=solid]; -"1641 2994" -> "3129 3098" [label="[1]", style=solid]; -"1641 2994" -> "3147 3054" [label="[1]", style=solid]; -"1641 2994" -> "3165 3010" [label="[1]", style=solid]; -"1642 2996" -> "1643 2997" [label="[1]", style=dashed]; -"1643 2997" -> "1644 6478" [label="[1]", style=dashed]; -"1643 2997" -> "1754 6434" [label="[1]", style=dashed]; -"1643 2997" -> "1772 6390" [label="[1]", style=dashed]; -"1643 2997" -> "1790 6346" [label="[1]", style=dashed]; -"1643 2997" -> "1808 6302" [label="[1]", style=dashed]; -"1643 2997" -> "1826 6258" [label="[1]", style=dashed]; -"1643 2997" -> "1844 6214" [label="[1]", style=dashed]; -"1643 2997" -> "1862 6170" [label="[1]", style=dashed]; -"1643 2997" -> "1880 6126" [label="[1]", style=dashed]; -"1643 2997" -> "1898 6082" [label="[1]", style=dashed]; -"1643 2997" -> "1916 6038" [label="[1]", style=dashed]; -"1643 2997" -> "1934 5994" [label="[1]", style=dashed]; -"1643 2997" -> "1952 5950" [label="[1]", style=dashed]; -"1643 2997" -> "1970 5906" [label="[1]", style=dashed]; -"1643 2997" -> "1988 5862" [label="[1]", style=dashed]; -"1643 2997" -> "2006 5818" [label="[1]", style=dashed]; -"1643 2997" -> "2024 5774" [label="[1]", style=dashed]; -"1643 2997" -> "2042 5730" [label="[1]", style=dashed]; -"1643 2997" -> "2060 5686" [label="[1]", style=dashed]; -"1643 2997" -> "2078 5642" [label="[1]", style=dashed]; -"1643 2997" -> "2096 5598" [label="[1]", style=dashed]; -"1643 2997" -> "2114 5554" [label="[1]", style=dashed]; -"1643 2997" -> "2132 5510" [label="[1]", 
style=dashed]; -"1643 2997" -> "2150 5466" [label="[1]", style=dashed]; -"1643 2997" -> "2168 5422" [label="[1]", style=dashed]; -"1643 2997" -> "2186 5378" [label="[1]", style=dashed]; -"1643 2997" -> "2204 5334" [label="[1]", style=dashed]; -"1643 2997" -> "2222 5290" [label="[1]", style=dashed]; -"1643 2997" -> "2240 5246" [label="[1]", style=dashed]; -"1643 2997" -> "2258 5202" [label="[1]", style=dashed]; -"1643 2997" -> "2276 5158" [label="[1]", style=dashed]; -"1643 2997" -> "2294 5114" [label="[1]", style=dashed]; -"1643 2997" -> "2312 5070" [label="[1]", style=dashed]; -"1643 2997" -> "2330 5026" [label="[1]", style=dashed]; -"1643 2997" -> "2348 4982" [label="[1]", style=dashed]; -"1643 2997" -> "2366 4938" [label="[1]", style=dashed]; -"1643 2997" -> "2384 4894" [label="[1]", style=dashed]; -"1643 2997" -> "2402 4850" [label="[1]", style=dashed]; -"1643 2997" -> "2420 4806" [label="[1]", style=dashed]; -"1643 2997" -> "2438 4762" [label="[1]", style=dashed]; -"1643 2997" -> "2456 4718" [label="[1]", style=dashed]; -"1643 2997" -> "2474 4674" [label="[1]", style=dashed]; -"1643 2997" -> "2492 4630" [label="[1]", style=dashed]; -"1643 2997" -> "2510 4586" [label="[1]", style=dashed]; -"1643 2997" -> "2528 4542" [label="[1]", style=dashed]; -"1643 2997" -> "2546 4498" [label="[1]", style=dashed]; -"1643 2997" -> "2564 4454" [label="[1]", style=dashed]; -"1643 2997" -> "2582 4410" [label="[1]", style=dashed]; -"1643 2997" -> "2600 4366" [label="[1]", style=dashed]; -"1643 2997" -> "2618 4322" [label="[1]", style=dashed]; -"1643 2997" -> "2636 4278" [label="[1]", style=dashed]; -"1643 2997" -> "2654 4234" [label="[1]", style=dashed]; -"1643 2997" -> "2672 4190" [label="[1]", style=dashed]; -"1643 2997" -> "2690 4146" [label="[1]", style=dashed]; -"1643 2997" -> "2708 4102" [label="[1]", style=dashed]; -"1643 2997" -> "2726 4058" [label="[1]", style=dashed]; -"1643 2997" -> "2744 4014" [label="[1]", style=dashed]; -"1643 2997" -> "2762 3970" [label="[1]", style=dashed]; -"1643 2997" -> "2780 3926" [label="[1]", style=dashed]; -"1643 2997" -> "2798 3882" [label="[1]", style=dashed]; -"1643 2997" -> "2816 3838" [label="[1]", style=dashed]; -"1643 2997" -> "2834 3794" [label="[1]", style=dashed]; -"1643 2997" -> "2852 3750" [label="[1]", style=dashed]; -"1643 2997" -> "2870 3706" [label="[1]", style=dashed]; -"1643 2997" -> "2888 3662" [label="[1]", style=dashed]; -"1643 2997" -> "2906 3618" [label="[1]", style=dashed]; -"1643 2997" -> "2924 3574" [label="[1]", style=dashed]; -"1643 2997" -> "2942 3530" [label="[1]", style=dashed]; -"1643 2997" -> "2960 3486" [label="[1]", style=dashed]; -"1643 2997" -> "2978 3442" [label="[1]", style=dashed]; -"1643 2997" -> "2996 3398" [label="[1]", style=dashed]; -"1643 2997" -> "3014 3354" [label="[1]", style=dashed]; -"1643 2997" -> "3032 3310" [label="[1]", style=dashed]; -"1643 2997" -> "3050 3266" [label="[1]", style=dashed]; -"1643 2997" -> "3068 3222" [label="[1]", style=dashed]; -"1643 2997" -> "3086 3178" [label="[1]", style=dashed]; -"1643 2997" -> "3104 3134" [label="[1]", style=dashed]; -"1643 2997" -> "3122 3090" [label="[1]", style=dashed]; -"1643 2997" -> "3140 3046" [label="[1]", style=dashed]; -"1643 2997" -> "3158 3002" [label="[1]", style=dashed]; -"1644 6478" -> "1645 6480" [label="[1]", style=dashed]; -"1645 6480" -> "1646 6481" [label="[1]", style=dashed]; -"1646 6481" -> "1647 6482" [label="[1]", style=solid]; -"1647 6482" -> "1648 6483" [label="[1, 1]", style=dashed]; -"1648 6483" -> "1649 6484" [label="[1, 1]", style=dashed]; 
-"1649 6484" -> "1650 6487" [label="[1]", style=dashed]; -"1649 6484" -> "1748 6495" [label="[1]", style=dashed]; -"1650 6487" -> "1652 6488" [label="[1]", style=dashed]; -"1651 6486" -> "1652 6488" [label="[1]", style=solid]; -"1652 6488" -> "1653 6497" [label="[1]", style=solid]; -"1652 6488" -> "1753 6508" [label="[1]", style=solid]; -"1653 6497" -> "1654 6498" [label="[1]", style=solid]; -"1654 6498" -> "1750 6501" [label="[1]", style=solid]; +"1640 2955" -> "1641 2994" [label="[-1]", style=solid]; +"1641 2994" -> "1642 2996" [label="[]", style=solid]; +"1641 2994" -> "1651 6486" [label="[]", style=solid]; +"1641 2994" -> "1761 6442" [label="[]", style=solid]; +"1641 2994" -> "1779 6398" [label="[]", style=solid]; +"1641 2994" -> "1797 6354" [label="[]", style=solid]; +"1641 2994" -> "1815 6310" [label="[]", style=solid]; +"1641 2994" -> "1833 6266" [label="[]", style=solid]; +"1641 2994" -> "1851 6222" [label="[]", style=solid]; +"1641 2994" -> "1869 6178" [label="[]", style=solid]; +"1641 2994" -> "1887 6134" [label="[]", style=solid]; +"1641 2994" -> "1905 6090" [label="[]", style=solid]; +"1641 2994" -> "1923 6046" [label="[]", style=solid]; +"1641 2994" -> "1941 6002" [label="[]", style=solid]; +"1641 2994" -> "1959 5958" [label="[]", style=solid]; +"1641 2994" -> "1977 5914" [label="[]", style=solid]; +"1641 2994" -> "1995 5870" [label="[]", style=solid]; +"1641 2994" -> "2013 5826" [label="[]", style=solid]; +"1641 2994" -> "2031 5782" [label="[]", style=solid]; +"1641 2994" -> "2049 5738" [label="[]", style=solid]; +"1641 2994" -> "2067 5694" [label="[]", style=solid]; +"1641 2994" -> "2085 5650" [label="[]", style=solid]; +"1641 2994" -> "2103 5606" [label="[]", style=solid]; +"1641 2994" -> "2121 5562" [label="[]", style=solid]; +"1641 2994" -> "2139 5518" [label="[]", style=solid]; +"1641 2994" -> "2157 5474" [label="[]", style=solid]; +"1641 2994" -> "2175 5430" [label="[]", style=solid]; +"1641 2994" -> "2193 5386" [label="[]", style=solid]; +"1641 2994" -> "2211 5342" [label="[]", style=solid]; +"1641 2994" -> "2229 5298" [label="[]", style=solid]; +"1641 2994" -> "2247 5254" [label="[]", style=solid]; +"1641 2994" -> "2265 5210" [label="[]", style=solid]; +"1641 2994" -> "2283 5166" [label="[]", style=solid]; +"1641 2994" -> "2301 5122" [label="[]", style=solid]; +"1641 2994" -> "2319 5078" [label="[]", style=solid]; +"1641 2994" -> "2337 5034" [label="[]", style=solid]; +"1641 2994" -> "2355 4990" [label="[]", style=solid]; +"1641 2994" -> "2373 4946" [label="[]", style=solid]; +"1641 2994" -> "2391 4902" [label="[]", style=solid]; +"1641 2994" -> "2409 4858" [label="[]", style=solid]; +"1641 2994" -> "2427 4814" [label="[]", style=solid]; +"1641 2994" -> "2445 4770" [label="[]", style=solid]; +"1641 2994" -> "2463 4726" [label="[]", style=solid]; +"1641 2994" -> "2481 4682" [label="[]", style=solid]; +"1641 2994" -> "2499 4638" [label="[]", style=solid]; +"1641 2994" -> "2517 4594" [label="[]", style=solid]; +"1641 2994" -> "2535 4550" [label="[]", style=solid]; +"1641 2994" -> "2553 4506" [label="[]", style=solid]; +"1641 2994" -> "2571 4462" [label="[]", style=solid]; +"1641 2994" -> "2589 4418" [label="[]", style=solid]; +"1641 2994" -> "2607 4374" [label="[]", style=solid]; +"1641 2994" -> "2625 4330" [label="[]", style=solid]; +"1641 2994" -> "2643 4286" [label="[]", style=solid]; +"1641 2994" -> "2661 4242" [label="[]", style=solid]; +"1641 2994" -> "2679 4198" [label="[]", style=solid]; +"1641 2994" -> "2697 4154" [label="[]", style=solid]; +"1641 2994" -> "2715 
4110" [label="[]", style=solid]; +"1641 2994" -> "2733 4066" [label="[]", style=solid]; +"1641 2994" -> "2751 4022" [label="[]", style=solid]; +"1641 2994" -> "2769 3978" [label="[]", style=solid]; +"1641 2994" -> "2787 3934" [label="[]", style=solid]; +"1641 2994" -> "2805 3890" [label="[]", style=solid]; +"1641 2994" -> "2823 3846" [label="[]", style=solid]; +"1641 2994" -> "2841 3802" [label="[]", style=solid]; +"1641 2994" -> "2859 3758" [label="[]", style=solid]; +"1641 2994" -> "2877 3714" [label="[]", style=solid]; +"1641 2994" -> "2895 3670" [label="[]", style=solid]; +"1641 2994" -> "2913 3626" [label="[]", style=solid]; +"1641 2994" -> "2931 3582" [label="[]", style=solid]; +"1641 2994" -> "2949 3538" [label="[]", style=solid]; +"1641 2994" -> "2967 3494" [label="[]", style=solid]; +"1641 2994" -> "2985 3450" [label="[]", style=solid]; +"1641 2994" -> "3003 3406" [label="[]", style=solid]; +"1641 2994" -> "3021 3362" [label="[]", style=solid]; +"1641 2994" -> "3039 3318" [label="[]", style=solid]; +"1641 2994" -> "3057 3274" [label="[]", style=solid]; +"1641 2994" -> "3075 3230" [label="[]", style=solid]; +"1641 2994" -> "3093 3186" [label="[]", style=solid]; +"1641 2994" -> "3111 3142" [label="[]", style=solid]; +"1641 2994" -> "3129 3098" [label="[]", style=solid]; +"1641 2994" -> "3147 3054" [label="[]", style=solid]; +"1641 2994" -> "3165 3010" [label="[]", style=solid]; +"1642 2996" -> "1643 2997" [label="[]", style=dashed]; +"1643 2997" -> "1644 6478" [label="[]", style=dashed]; +"1643 2997" -> "1754 6434" [label="[]", style=dashed]; +"1643 2997" -> "1772 6390" [label="[]", style=dashed]; +"1643 2997" -> "1790 6346" [label="[]", style=dashed]; +"1643 2997" -> "1808 6302" [label="[]", style=dashed]; +"1643 2997" -> "1826 6258" [label="[]", style=dashed]; +"1643 2997" -> "1844 6214" [label="[]", style=dashed]; +"1643 2997" -> "1862 6170" [label="[]", style=dashed]; +"1643 2997" -> "1880 6126" [label="[]", style=dashed]; +"1643 2997" -> "1898 6082" [label="[]", style=dashed]; +"1643 2997" -> "1916 6038" [label="[]", style=dashed]; +"1643 2997" -> "1934 5994" [label="[]", style=dashed]; +"1643 2997" -> "1952 5950" [label="[]", style=dashed]; +"1643 2997" -> "1970 5906" [label="[]", style=dashed]; +"1643 2997" -> "1988 5862" [label="[]", style=dashed]; +"1643 2997" -> "2006 5818" [label="[]", style=dashed]; +"1643 2997" -> "2024 5774" [label="[]", style=dashed]; +"1643 2997" -> "2042 5730" [label="[]", style=dashed]; +"1643 2997" -> "2060 5686" [label="[]", style=dashed]; +"1643 2997" -> "2078 5642" [label="[]", style=dashed]; +"1643 2997" -> "2096 5598" [label="[]", style=dashed]; +"1643 2997" -> "2114 5554" [label="[]", style=dashed]; +"1643 2997" -> "2132 5510" [label="[]", style=dashed]; +"1643 2997" -> "2150 5466" [label="[]", style=dashed]; +"1643 2997" -> "2168 5422" [label="[]", style=dashed]; +"1643 2997" -> "2186 5378" [label="[]", style=dashed]; +"1643 2997" -> "2204 5334" [label="[]", style=dashed]; +"1643 2997" -> "2222 5290" [label="[]", style=dashed]; +"1643 2997" -> "2240 5246" [label="[]", style=dashed]; +"1643 2997" -> "2258 5202" [label="[]", style=dashed]; +"1643 2997" -> "2276 5158" [label="[]", style=dashed]; +"1643 2997" -> "2294 5114" [label="[]", style=dashed]; +"1643 2997" -> "2312 5070" [label="[]", style=dashed]; +"1643 2997" -> "2330 5026" [label="[]", style=dashed]; +"1643 2997" -> "2348 4982" [label="[]", style=dashed]; +"1643 2997" -> "2366 4938" [label="[]", style=dashed]; +"1643 2997" -> "2384 4894" [label="[]", style=dashed]; +"1643 2997" -> 
"2402 4850" [label="[]", style=dashed]; +"1643 2997" -> "2420 4806" [label="[]", style=dashed]; +"1643 2997" -> "2438 4762" [label="[]", style=dashed]; +"1643 2997" -> "2456 4718" [label="[]", style=dashed]; +"1643 2997" -> "2474 4674" [label="[]", style=dashed]; +"1643 2997" -> "2492 4630" [label="[]", style=dashed]; +"1643 2997" -> "2510 4586" [label="[]", style=dashed]; +"1643 2997" -> "2528 4542" [label="[]", style=dashed]; +"1643 2997" -> "2546 4498" [label="[]", style=dashed]; +"1643 2997" -> "2564 4454" [label="[]", style=dashed]; +"1643 2997" -> "2582 4410" [label="[]", style=dashed]; +"1643 2997" -> "2600 4366" [label="[]", style=dashed]; +"1643 2997" -> "2618 4322" [label="[]", style=dashed]; +"1643 2997" -> "2636 4278" [label="[]", style=dashed]; +"1643 2997" -> "2654 4234" [label="[]", style=dashed]; +"1643 2997" -> "2672 4190" [label="[]", style=dashed]; +"1643 2997" -> "2690 4146" [label="[]", style=dashed]; +"1643 2997" -> "2708 4102" [label="[]", style=dashed]; +"1643 2997" -> "2726 4058" [label="[]", style=dashed]; +"1643 2997" -> "2744 4014" [label="[]", style=dashed]; +"1643 2997" -> "2762 3970" [label="[]", style=dashed]; +"1643 2997" -> "2780 3926" [label="[]", style=dashed]; +"1643 2997" -> "2798 3882" [label="[]", style=dashed]; +"1643 2997" -> "2816 3838" [label="[]", style=dashed]; +"1643 2997" -> "2834 3794" [label="[]", style=dashed]; +"1643 2997" -> "2852 3750" [label="[]", style=dashed]; +"1643 2997" -> "2870 3706" [label="[]", style=dashed]; +"1643 2997" -> "2888 3662" [label="[]", style=dashed]; +"1643 2997" -> "2906 3618" [label="[]", style=dashed]; +"1643 2997" -> "2924 3574" [label="[]", style=dashed]; +"1643 2997" -> "2942 3530" [label="[]", style=dashed]; +"1643 2997" -> "2960 3486" [label="[]", style=dashed]; +"1643 2997" -> "2978 3442" [label="[]", style=dashed]; +"1643 2997" -> "2996 3398" [label="[]", style=dashed]; +"1643 2997" -> "3014 3354" [label="[]", style=dashed]; +"1643 2997" -> "3032 3310" [label="[]", style=dashed]; +"1643 2997" -> "3050 3266" [label="[]", style=dashed]; +"1643 2997" -> "3068 3222" [label="[]", style=dashed]; +"1643 2997" -> "3086 3178" [label="[]", style=dashed]; +"1643 2997" -> "3104 3134" [label="[]", style=dashed]; +"1643 2997" -> "3122 3090" [label="[]", style=dashed]; +"1643 2997" -> "3140 3046" [label="[]", style=dashed]; +"1643 2997" -> "3158 3002" [label="[]", style=dashed]; +"1644 6478" -> "1645 6480" [label="[]", style=dashed]; +"1645 6480" -> "1646 6481" [label="[]", style=dashed]; +"1646 6481" -> "1647 6482" [label="[]", style=solid]; +"1647 6482" -> "1648 6483" [label="[-1, -1]", style=dashed]; +"1648 6483" -> "1649 6484" [label="[-1, -1]", style=dashed]; +"1649 6484" -> "1650 6487" [label="[-1]", style=dashed]; +"1649 6484" -> "1748 6495" [label="[-1]", style=dashed]; +"1650 6487" -> "1652 6488" [label="[-1]", style=dashed]; +"1651 6486" -> "1652 6488" [label="[]", style=solid]; +"1652 6488" -> "1653 6497" [label="[]", style=solid]; +"1652 6488" -> "1753 6508" [label="[]", style=solid]; +"1653 6497" -> "1654 6498" [label="[]", style=solid]; +"1654 6498" -> "1750 6501" [label="[]", style=solid]; "1655 2984" -> "1656 2987" [label="[]", style=dashed]; "1656 2987" -> "1658 2988" [label="[1]", style=dashed]; "1657 2986" -> "1658 2988" [label="[1]", style=dashed]; "1658 2988" -> "1746 2989" [label="[2]", style=dashed]; -"1659 2773_MatMul" -> "1660 2773_Add" [label="[1]", style=solid]; -"1660 2773_Add" -> "1661 2776" [label="[1]", style=solid]; -"1661 2776" -> "1662 2947" [label="[1]", style=solid]; -"1661 2776" -> 
"1673 2872" [label="[1]", style=solid]; -"1661 2776" -> "1688 2848" [label="[1]", style=solid]; -"1661 2776" -> "1704 2860" [label="[1]", style=solid]; -"1661 2776" -> "1719 2836" [label="[1]", style=solid]; -"1662 2947" -> "1734 2948" [label="[1]", style=dashed]; -"1663 2775" -> "1664 2777" [label="[1]", style=solid]; -"1664 2777" -> "1665 2806" [label="[1]", style=solid]; -"1664 2777" -> "1667 2799" [label="[1]", style=solid]; -"1664 2777" -> "1681 2826" [label="[1]", style=solid]; -"1664 2777" -> "1696 2789" [label="[1]", style=solid]; -"1664 2777" -> "1698 2782" [label="[1]", style=solid]; -"1664 2777" -> "1712 2816" [label="[1]", style=solid]; -"1665 2806" -> "1666 2808" [label="[1]", style=solid]; -"1666 2808" -> "1669 2809" [label="[1]", style=solid]; -"1667 2799" -> "1668 2801" [label="[1]", style=solid]; -"1668 2801" -> "1669 2809" [label="[1]", style=solid]; -"1669 2809" -> "1670 2811" [label="[1]", style=solid]; -"1670 2811" -> "1671 2923" [label="[1]", style=solid]; -"1670 2811" -> "1680 2830" [label="[1]", style=solid]; -"1670 2811" -> "1686 2900" [label="[1]", style=solid]; -"1671 2923" -> "1672 2924" [label="[1]", style=solid]; -"1672 2924" -> "1678 2925" [label="[1]", style=solid]; -"1673 2872" -> "1674 2877" [label="[1]", style=solid]; -"1674 2877" -> "1675 2879" [label="[1]", style=solid]; -"1675 2879" -> "1676 2881" [label="[1]", style=solid]; -"1676 2881" -> "1677 2918" [label="[1]", style=solid]; -"1677 2918" -> "1678 2925" [label="[1]", style=solid]; -"1678 2925" -> "1679 2938" [label="[1]", style=solid]; -"1678 2925" -> "1727 2930" [label="[1]", style=solid]; -"1679 2938" -> "1693 2939" [label="[1]", style=solid]; -"1680 2830" -> "1683 2831" [label="[1]", style=solid]; -"1681 2826" -> "1682 2828" [label="[1]", style=solid]; -"1682 2828" -> "1683 2831" [label="[1]", style=solid]; -"1683 2831" -> "1684 2907" [label="[1]", style=solid]; -"1684 2907" -> "1685 2908" [label="[1]", style=solid]; -"1685 2908" -> "1692 2909" [label="[1]", style=solid]; -"1686 2900" -> "1687 2901" [label="[1]", style=solid]; -"1687 2901" -> "1691 2902" [label="[1]", style=solid]; -"1688 2848" -> "1689 2853" [label="[1]", style=solid]; -"1689 2853" -> "1690 2855" [label="[1]", style=solid]; -"1690 2855" -> "1691 2902" [label="[1]", style=solid]; -"1691 2902" -> "1692 2909" [label="[1]", style=solid]; -"1692 2909" -> "1693 2939" [label="[1]", style=solid]; -"1692 2909" -> "1728 2931" [label="[1]", style=solid]; -"1693 2939" -> "1694 2941" [label="[1]", style=solid]; -"1694 2941" -> "1695 2945" [label="[1]", style=solid]; -"1695 2945" -> "1733 2946" [label="[1]", style=solid]; -"1696 2789" -> "1697 2791" [label="[1]", style=solid]; -"1697 2791" -> "1700 2792" [label="[1]", style=solid]; -"1698 2782" -> "1699 2784" [label="[1]", style=solid]; -"1699 2784" -> "1700 2792" [label="[1]", style=solid]; -"1700 2792" -> "1701 2794" [label="[1]", style=solid]; -"1701 2794" -> "1702 2915" [label="[1]", style=solid]; -"1701 2794" -> "1711 2820" [label="[1]", style=solid]; -"1701 2794" -> "1717 2886" [label="[1]", style=solid]; -"1702 2915" -> "1703 2916" [label="[1]", style=solid]; -"1703 2916" -> "1709 2917" [label="[1]", style=solid]; -"1704 2860" -> "1705 2865" [label="[1]", style=solid]; -"1705 2865" -> "1706 2867" [label="[1]", style=solid]; -"1706 2867" -> "1707 2880" [label="[1]", style=solid]; -"1707 2880" -> "1708 2910" [label="[1]", style=solid]; -"1708 2910" -> "1709 2917" [label="[1]", style=solid]; -"1709 2917" -> "1710 2933" [label="[1]", style=solid]; -"1709 2917" -> "1730 2927" [label="[1]", 
style=solid]; -"1710 2933" -> "1724 2934" [label="[1]", style=solid]; -"1711 2820" -> "1714 2821" [label="[1]", style=solid]; -"1712 2816" -> "1713 2818" [label="[1]", style=solid]; -"1713 2818" -> "1714 2821" [label="[1]", style=solid]; -"1714 2821" -> "1715 2893" [label="[1]", style=solid]; -"1715 2893" -> "1716 2894" [label="[1]", style=solid]; -"1716 2894" -> "1723 2895" [label="[1]", style=solid]; -"1717 2886" -> "1718 2887" [label="[1]", style=solid]; -"1718 2887" -> "1722 2888" [label="[1]", style=solid]; -"1719 2836" -> "1720 2841" [label="[1]", style=solid]; -"1720 2841" -> "1721 2843" [label="[1]", style=solid]; -"1721 2843" -> "1722 2888" [label="[1]", style=solid]; -"1722 2888" -> "1723 2895" [label="[1]", style=solid]; -"1723 2895" -> "1724 2934" [label="[1]", style=solid]; -"1723 2895" -> "1731 2928" [label="[1]", style=solid]; -"1724 2934" -> "1725 2936" [label="[1]", style=solid]; -"1725 2936" -> "1726 2944" [label="[1]", style=solid]; -"1726 2944" -> "1733 2946" [label="[1]", style=solid]; -"1727 2930" -> "1728 2931" [label="[1]", style=solid]; -"1728 2931" -> "1729 2943" [label="[1]", style=solid]; -"1729 2943" -> "1733 2946" [label="[1]", style=solid]; -"1730 2927" -> "1731 2928" [label="[1]", style=solid]; -"1731 2928" -> "1732 2942" [label="[1]", style=solid]; -"1732 2942" -> "1733 2946" [label="[1]", style=solid]; -"1733 2946" -> "1734 2948" [label="[1]", style=solid]; -"1734 2948" -> "1735 2953" [label="[1]", style=solid]; -"1735 2953" -> "1736 2971" [label="[1, 4]", style=solid]; -"1735 2953" -> "1740 2960" [label="[1, 4]", style=solid]; -"1736 2971" -> "1737 2976" [label="[1, 4]", style=solid]; -"1737 2976" -> "1738 2977" [label="[1, 2]", style=solid]; -"1738 2977" -> "1739 2979" [label="[1, 2]", style=solid]; -"1739 2979" -> "1744 2980" [label="[1, 2, 1]", style=solid]; -"1740 2960" -> "1741 2965" [label="[1, 4]", style=solid]; -"1741 2965" -> "1742 2966" [label="[1, 2]", style=solid]; -"1742 2966" -> "1743 2978" [label="[1, 2]", style=solid]; -"1743 2978" -> "1744 2980" [label="[1, 2, 1]", style=solid]; -"1744 2980" -> "1745 2982" [label="[1, 2, 2]", style=solid]; -"1745 2982" -> "1746 2989" [label="[1, 4]", style=solid]; -"1746 2989" -> "1747 6493" [label="[1]", style=solid]; -"1746 2989" -> "1765 6449" [label="[1]", style=solid]; -"1746 2989" -> "1783 6405" [label="[1]", style=solid]; -"1746 2989" -> "1801 6361" [label="[1]", style=solid]; -"1746 2989" -> "1819 6317" [label="[1]", style=solid]; -"1746 2989" -> "1837 6273" [label="[1]", style=solid]; -"1746 2989" -> "1855 6229" [label="[1]", style=solid]; -"1746 2989" -> "1873 6185" [label="[1]", style=solid]; -"1746 2989" -> "1891 6141" [label="[1]", style=solid]; -"1746 2989" -> "1909 6097" [label="[1]", style=solid]; -"1746 2989" -> "1927 6053" [label="[1]", style=solid]; -"1746 2989" -> "1945 6009" [label="[1]", style=solid]; -"1746 2989" -> "1963 5965" [label="[1]", style=solid]; -"1746 2989" -> "1981 5921" [label="[1]", style=solid]; -"1746 2989" -> "1999 5877" [label="[1]", style=solid]; -"1746 2989" -> "2017 5833" [label="[1]", style=solid]; -"1746 2989" -> "2035 5789" [label="[1]", style=solid]; -"1746 2989" -> "2053 5745" [label="[1]", style=solid]; -"1746 2989" -> "2071 5701" [label="[1]", style=solid]; -"1746 2989" -> "2089 5657" [label="[1]", style=solid]; -"1746 2989" -> "2107 5613" [label="[1]", style=solid]; -"1746 2989" -> "2125 5569" [label="[1]", style=solid]; -"1746 2989" -> "2143 5525" [label="[1]", style=solid]; -"1746 2989" -> "2161 5481" [label="[1]", style=solid]; -"1746 2989" -> "2179 
5437" [label="[1]", style=solid]; -"1746 2989" -> "2197 5393" [label="[1]", style=solid]; -"1746 2989" -> "2215 5349" [label="[1]", style=solid]; -"1746 2989" -> "2233 5305" [label="[1]", style=solid]; -"1746 2989" -> "2251 5261" [label="[1]", style=solid]; -"1746 2989" -> "2269 5217" [label="[1]", style=solid]; -"1746 2989" -> "2287 5173" [label="[1]", style=solid]; -"1746 2989" -> "2305 5129" [label="[1]", style=solid]; -"1746 2989" -> "2323 5085" [label="[1]", style=solid]; -"1746 2989" -> "2341 5041" [label="[1]", style=solid]; -"1746 2989" -> "2359 4997" [label="[1]", style=solid]; -"1746 2989" -> "2377 4953" [label="[1]", style=solid]; -"1746 2989" -> "2395 4909" [label="[1]", style=solid]; -"1746 2989" -> "2413 4865" [label="[1]", style=solid]; -"1746 2989" -> "2431 4821" [label="[1]", style=solid]; -"1746 2989" -> "2449 4777" [label="[1]", style=solid]; -"1746 2989" -> "2467 4733" [label="[1]", style=solid]; -"1746 2989" -> "2485 4689" [label="[1]", style=solid]; -"1746 2989" -> "2503 4645" [label="[1]", style=solid]; -"1746 2989" -> "2521 4601" [label="[1]", style=solid]; -"1746 2989" -> "2539 4557" [label="[1]", style=solid]; -"1746 2989" -> "2557 4513" [label="[1]", style=solid]; -"1746 2989" -> "2575 4469" [label="[1]", style=solid]; -"1746 2989" -> "2593 4425" [label="[1]", style=solid]; -"1746 2989" -> "2611 4381" [label="[1]", style=solid]; -"1746 2989" -> "2629 4337" [label="[1]", style=solid]; -"1746 2989" -> "2647 4293" [label="[1]", style=solid]; -"1746 2989" -> "2665 4249" [label="[1]", style=solid]; -"1746 2989" -> "2683 4205" [label="[1]", style=solid]; -"1746 2989" -> "2701 4161" [label="[1]", style=solid]; -"1746 2989" -> "2719 4117" [label="[1]", style=solid]; -"1746 2989" -> "2737 4073" [label="[1]", style=solid]; -"1746 2989" -> "2755 4029" [label="[1]", style=solid]; -"1746 2989" -> "2773 3985" [label="[1]", style=solid]; -"1746 2989" -> "2791 3941" [label="[1]", style=solid]; -"1746 2989" -> "2809 3897" [label="[1]", style=solid]; -"1746 2989" -> "2827 3853" [label="[1]", style=solid]; -"1746 2989" -> "2845 3809" [label="[1]", style=solid]; -"1746 2989" -> "2863 3765" [label="[1]", style=solid]; -"1746 2989" -> "2881 3721" [label="[1]", style=solid]; -"1746 2989" -> "2899 3677" [label="[1]", style=solid]; -"1746 2989" -> "2917 3633" [label="[1]", style=solid]; -"1746 2989" -> "2935 3589" [label="[1]", style=solid]; -"1746 2989" -> "2953 3545" [label="[1]", style=solid]; -"1746 2989" -> "2971 3501" [label="[1]", style=solid]; -"1746 2989" -> "2989 3457" [label="[1]", style=solid]; -"1746 2989" -> "3007 3413" [label="[1]", style=solid]; -"1746 2989" -> "3025 3369" [label="[1]", style=solid]; -"1746 2989" -> "3043 3325" [label="[1]", style=solid]; -"1746 2989" -> "3061 3281" [label="[1]", style=solid]; -"1746 2989" -> "3079 3237" [label="[1]", style=solid]; -"1746 2989" -> "3097 3193" [label="[1]", style=solid]; -"1746 2989" -> "3115 3149" [label="[1]", style=solid]; -"1746 2989" -> "3133 3105" [label="[1]", style=solid]; -"1746 2989" -> "3151 3061" [label="[1]", style=solid]; -"1746 2989" -> "3169 3017" [label="[1]", style=solid]; -"1747 6493" -> "1748 6495" [label="[1]", style=solid]; -"1748 6495" -> "1749 6496" [label="[1]", style=solid]; -"1748 6495" -> "3186 6506" [label="[1]", style=solid]; -"1749 6496" -> "1750 6501" [label="[1]", style=solid]; -"1750 6501" -> "1751 6503" [label="[1, 3]", style=dashed]; -"1751 6503" -> "1752 6504" [label="[1, 1]", style=dashed]; -"1752 6504" -> "1753 6508" [label="[1]", style=dashed]; -"1752 6504" -> "3185 6505" 
[label="[1]", style=dashed]; -"1753 6508" -> "3176 6520" [label="[1]", style=solid]; -"1754 6434" -> "1755 6436" [label="[1]", style=dashed]; -"1755 6436" -> "1756 6437" [label="[1]", style=dashed]; -"1756 6437" -> "1757 6438" [label="[1]", style=solid]; -"1757 6438" -> "1758 6439" [label="[1, 1]", style=dashed]; -"1758 6439" -> "1759 6440" [label="[1, 1]", style=dashed]; -"1759 6440" -> "1760 6443" [label="[1]", style=dashed]; -"1759 6440" -> "1766 6451" [label="[1]", style=dashed]; -"1760 6443" -> "1762 6444" [label="[1]", style=dashed]; -"1761 6442" -> "1762 6444" [label="[1]", style=solid]; -"1762 6444" -> "1763 6453" [label="[1]", style=solid]; -"1762 6444" -> "1771 6464" [label="[1]", style=solid]; -"1763 6453" -> "1764 6454" [label="[1]", style=solid]; -"1764 6454" -> "1768 6457" [label="[1]", style=solid]; -"1765 6449" -> "1766 6451" [label="[1]", style=solid]; -"1766 6451" -> "1767 6452" [label="[1]", style=solid]; -"1766 6451" -> "3188 6462" [label="[1]", style=solid]; -"1767 6452" -> "1768 6457" [label="[1]", style=solid]; -"1768 6457" -> "1769 6459" [label="[1, 3]", style=dashed]; -"1769 6459" -> "1770 6460" [label="[1, 1]", style=dashed]; -"1770 6460" -> "1771 6464" [label="[1]", style=dashed]; -"1770 6460" -> "3187 6461" [label="[1]", style=dashed]; -"1771 6464" -> "3176 6520" [label="[1]", style=solid]; -"1772 6390" -> "1773 6392" [label="[1]", style=dashed]; -"1773 6392" -> "1774 6393" [label="[1]", style=dashed]; -"1774 6393" -> "1775 6394" [label="[1]", style=solid]; -"1775 6394" -> "1776 6395" [label="[1, 1]", style=dashed]; -"1776 6395" -> "1777 6396" [label="[1, 1]", style=dashed]; -"1777 6396" -> "1778 6399" [label="[1]", style=dashed]; -"1777 6396" -> "1784 6407" [label="[1]", style=dashed]; -"1778 6399" -> "1780 6400" [label="[1]", style=dashed]; -"1779 6398" -> "1780 6400" [label="[1]", style=solid]; -"1780 6400" -> "1781 6409" [label="[1]", style=solid]; -"1780 6400" -> "1789 6420" [label="[1]", style=solid]; -"1781 6409" -> "1782 6410" [label="[1]", style=solid]; -"1782 6410" -> "1786 6413" [label="[1]", style=solid]; -"1783 6405" -> "1784 6407" [label="[1]", style=solid]; -"1784 6407" -> "1785 6408" [label="[1]", style=solid]; -"1784 6407" -> "3190 6418" [label="[1]", style=solid]; -"1785 6408" -> "1786 6413" [label="[1]", style=solid]; -"1786 6413" -> "1787 6415" [label="[1, 3]", style=dashed]; -"1787 6415" -> "1788 6416" [label="[1, 1]", style=dashed]; -"1788 6416" -> "1789 6420" [label="[1]", style=dashed]; -"1788 6416" -> "3189 6417" [label="[1]", style=dashed]; -"1789 6420" -> "3176 6520" [label="[1]", style=solid]; -"1790 6346" -> "1791 6348" [label="[1]", style=dashed]; -"1791 6348" -> "1792 6349" [label="[1]", style=dashed]; -"1792 6349" -> "1793 6350" [label="[1]", style=solid]; -"1793 6350" -> "1794 6351" [label="[1, 1]", style=dashed]; -"1794 6351" -> "1795 6352" [label="[1, 1]", style=dashed]; -"1795 6352" -> "1796 6355" [label="[1]", style=dashed]; -"1795 6352" -> "1802 6363" [label="[1]", style=dashed]; -"1796 6355" -> "1798 6356" [label="[1]", style=dashed]; -"1797 6354" -> "1798 6356" [label="[1]", style=solid]; -"1798 6356" -> "1799 6365" [label="[1]", style=solid]; -"1798 6356" -> "1807 6376" [label="[1]", style=solid]; -"1799 6365" -> "1800 6366" [label="[1]", style=solid]; -"1800 6366" -> "1804 6369" [label="[1]", style=solid]; -"1801 6361" -> "1802 6363" [label="[1]", style=solid]; -"1802 6363" -> "1803 6364" [label="[1]", style=solid]; -"1802 6363" -> "3192 6374" [label="[1]", style=solid]; -"1803 6364" -> "1804 6369" [label="[1]", 
style=solid]; -"1804 6369" -> "1805 6371" [label="[1, 3]", style=dashed]; -"1805 6371" -> "1806 6372" [label="[1, 1]", style=dashed]; -"1806 6372" -> "1807 6376" [label="[1]", style=dashed]; -"1806 6372" -> "3191 6373" [label="[1]", style=dashed]; -"1807 6376" -> "3176 6520" [label="[1]", style=solid]; -"1808 6302" -> "1809 6304" [label="[1]", style=dashed]; -"1809 6304" -> "1810 6305" [label="[1]", style=dashed]; -"1810 6305" -> "1811 6306" [label="[1]", style=solid]; -"1811 6306" -> "1812 6307" [label="[1, 1]", style=dashed]; -"1812 6307" -> "1813 6308" [label="[1, 1]", style=dashed]; -"1813 6308" -> "1814 6311" [label="[1]", style=dashed]; -"1813 6308" -> "1820 6319" [label="[1]", style=dashed]; -"1814 6311" -> "1816 6312" [label="[1]", style=dashed]; -"1815 6310" -> "1816 6312" [label="[1]", style=solid]; -"1816 6312" -> "1817 6321" [label="[1]", style=solid]; -"1816 6312" -> "1825 6332" [label="[1]", style=solid]; -"1817 6321" -> "1818 6322" [label="[1]", style=solid]; -"1818 6322" -> "1822 6325" [label="[1]", style=solid]; -"1819 6317" -> "1820 6319" [label="[1]", style=solid]; -"1820 6319" -> "1821 6320" [label="[1]", style=solid]; -"1820 6319" -> "3194 6330" [label="[1]", style=solid]; -"1821 6320" -> "1822 6325" [label="[1]", style=solid]; -"1822 6325" -> "1823 6327" [label="[1, 3]", style=dashed]; -"1823 6327" -> "1824 6328" [label="[1, 1]", style=dashed]; -"1824 6328" -> "1825 6332" [label="[1]", style=dashed]; -"1824 6328" -> "3193 6329" [label="[1]", style=dashed]; -"1825 6332" -> "3176 6520" [label="[1]", style=solid]; -"1826 6258" -> "1827 6260" [label="[1]", style=dashed]; -"1827 6260" -> "1828 6261" [label="[1]", style=dashed]; -"1828 6261" -> "1829 6262" [label="[1]", style=solid]; -"1829 6262" -> "1830 6263" [label="[1, 1]", style=dashed]; -"1830 6263" -> "1831 6264" [label="[1, 1]", style=dashed]; -"1831 6264" -> "1832 6267" [label="[1]", style=dashed]; -"1831 6264" -> "1838 6275" [label="[1]", style=dashed]; -"1832 6267" -> "1834 6268" [label="[1]", style=dashed]; -"1833 6266" -> "1834 6268" [label="[1]", style=solid]; -"1834 6268" -> "1835 6277" [label="[1]", style=solid]; -"1834 6268" -> "1843 6288" [label="[1]", style=solid]; -"1835 6277" -> "1836 6278" [label="[1]", style=solid]; -"1836 6278" -> "1840 6281" [label="[1]", style=solid]; -"1837 6273" -> "1838 6275" [label="[1]", style=solid]; -"1838 6275" -> "1839 6276" [label="[1]", style=solid]; -"1838 6275" -> "3196 6286" [label="[1]", style=solid]; -"1839 6276" -> "1840 6281" [label="[1]", style=solid]; -"1840 6281" -> "1841 6283" [label="[1, 3]", style=dashed]; -"1841 6283" -> "1842 6284" [label="[1, 1]", style=dashed]; -"1842 6284" -> "1843 6288" [label="[1]", style=dashed]; -"1842 6284" -> "3195 6285" [label="[1]", style=dashed]; -"1843 6288" -> "3176 6520" [label="[1]", style=solid]; -"1844 6214" -> "1845 6216" [label="[1]", style=dashed]; -"1845 6216" -> "1846 6217" [label="[1]", style=dashed]; -"1846 6217" -> "1847 6218" [label="[1]", style=solid]; -"1847 6218" -> "1848 6219" [label="[1, 1]", style=dashed]; -"1848 6219" -> "1849 6220" [label="[1, 1]", style=dashed]; -"1849 6220" -> "1850 6223" [label="[1]", style=dashed]; -"1849 6220" -> "1856 6231" [label="[1]", style=dashed]; -"1850 6223" -> "1852 6224" [label="[1]", style=dashed]; -"1851 6222" -> "1852 6224" [label="[1]", style=solid]; -"1852 6224" -> "1853 6233" [label="[1]", style=solid]; -"1852 6224" -> "1861 6244" [label="[1]", style=solid]; -"1853 6233" -> "1854 6234" [label="[1]", style=solid]; -"1854 6234" -> "1858 6237" [label="[1]", style=solid]; 
-"1855 6229" -> "1856 6231" [label="[1]", style=solid]; -"1856 6231" -> "1857 6232" [label="[1]", style=solid]; -"1856 6231" -> "3198 6242" [label="[1]", style=solid]; -"1857 6232" -> "1858 6237" [label="[1]", style=solid]; -"1858 6237" -> "1859 6239" [label="[1, 3]", style=dashed]; -"1859 6239" -> "1860 6240" [label="[1, 1]", style=dashed]; -"1860 6240" -> "1861 6244" [label="[1]", style=dashed]; -"1860 6240" -> "3197 6241" [label="[1]", style=dashed]; -"1861 6244" -> "3176 6520" [label="[1]", style=solid]; -"1862 6170" -> "1863 6172" [label="[1]", style=dashed]; -"1863 6172" -> "1864 6173" [label="[1]", style=dashed]; -"1864 6173" -> "1865 6174" [label="[1]", style=solid]; -"1865 6174" -> "1866 6175" [label="[1, 1]", style=dashed]; -"1866 6175" -> "1867 6176" [label="[1, 1]", style=dashed]; -"1867 6176" -> "1868 6179" [label="[1]", style=dashed]; -"1867 6176" -> "1874 6187" [label="[1]", style=dashed]; -"1868 6179" -> "1870 6180" [label="[1]", style=dashed]; -"1869 6178" -> "1870 6180" [label="[1]", style=solid]; -"1870 6180" -> "1871 6189" [label="[1]", style=solid]; -"1870 6180" -> "1879 6200" [label="[1]", style=solid]; -"1871 6189" -> "1872 6190" [label="[1]", style=solid]; -"1872 6190" -> "1876 6193" [label="[1]", style=solid]; -"1873 6185" -> "1874 6187" [label="[1]", style=solid]; -"1874 6187" -> "1875 6188" [label="[1]", style=solid]; -"1874 6187" -> "3200 6198" [label="[1]", style=solid]; -"1875 6188" -> "1876 6193" [label="[1]", style=solid]; -"1876 6193" -> "1877 6195" [label="[1, 3]", style=dashed]; -"1877 6195" -> "1878 6196" [label="[1, 1]", style=dashed]; -"1878 6196" -> "1879 6200" [label="[1]", style=dashed]; -"1878 6196" -> "3199 6197" [label="[1]", style=dashed]; -"1879 6200" -> "3176 6520" [label="[1]", style=solid]; -"1880 6126" -> "1881 6128" [label="[1]", style=dashed]; -"1881 6128" -> "1882 6129" [label="[1]", style=dashed]; -"1882 6129" -> "1883 6130" [label="[1]", style=solid]; -"1883 6130" -> "1884 6131" [label="[1, 1]", style=dashed]; -"1884 6131" -> "1885 6132" [label="[1, 1]", style=dashed]; -"1885 6132" -> "1886 6135" [label="[1]", style=dashed]; -"1885 6132" -> "1892 6143" [label="[1]", style=dashed]; -"1886 6135" -> "1888 6136" [label="[1]", style=dashed]; -"1887 6134" -> "1888 6136" [label="[1]", style=solid]; -"1888 6136" -> "1889 6145" [label="[1]", style=solid]; -"1888 6136" -> "1897 6156" [label="[1]", style=solid]; -"1889 6145" -> "1890 6146" [label="[1]", style=solid]; -"1890 6146" -> "1894 6149" [label="[1]", style=solid]; -"1891 6141" -> "1892 6143" [label="[1]", style=solid]; -"1892 6143" -> "1893 6144" [label="[1]", style=solid]; -"1892 6143" -> "3202 6154" [label="[1]", style=solid]; -"1893 6144" -> "1894 6149" [label="[1]", style=solid]; -"1894 6149" -> "1895 6151" [label="[1, 3]", style=dashed]; -"1895 6151" -> "1896 6152" [label="[1, 1]", style=dashed]; -"1896 6152" -> "1897 6156" [label="[1]", style=dashed]; -"1896 6152" -> "3201 6153" [label="[1]", style=dashed]; -"1897 6156" -> "3176 6520" [label="[1]", style=solid]; -"1898 6082" -> "1899 6084" [label="[1]", style=dashed]; -"1899 6084" -> "1900 6085" [label="[1]", style=dashed]; -"1900 6085" -> "1901 6086" [label="[1]", style=solid]; -"1901 6086" -> "1902 6087" [label="[1, 1]", style=dashed]; -"1902 6087" -> "1903 6088" [label="[1, 1]", style=dashed]; -"1903 6088" -> "1904 6091" [label="[1]", style=dashed]; -"1903 6088" -> "1910 6099" [label="[1]", style=dashed]; -"1904 6091" -> "1906 6092" [label="[1]", style=dashed]; -"1905 6090" -> "1906 6092" [label="[1]", style=solid]; -"1906 6092" 
-> "1907 6101" [label="[1]", style=solid]; -"1906 6092" -> "1915 6112" [label="[1]", style=solid]; -"1907 6101" -> "1908 6102" [label="[1]", style=solid]; -"1908 6102" -> "1912 6105" [label="[1]", style=solid]; -"1909 6097" -> "1910 6099" [label="[1]", style=solid]; -"1910 6099" -> "1911 6100" [label="[1]", style=solid]; -"1910 6099" -> "3204 6110" [label="[1]", style=solid]; -"1911 6100" -> "1912 6105" [label="[1]", style=solid]; -"1912 6105" -> "1913 6107" [label="[1, 3]", style=dashed]; -"1913 6107" -> "1914 6108" [label="[1, 1]", style=dashed]; -"1914 6108" -> "1915 6112" [label="[1]", style=dashed]; -"1914 6108" -> "3203 6109" [label="[1]", style=dashed]; -"1915 6112" -> "3176 6520" [label="[1]", style=solid]; -"1916 6038" -> "1917 6040" [label="[1]", style=dashed]; -"1917 6040" -> "1918 6041" [label="[1]", style=dashed]; -"1918 6041" -> "1919 6042" [label="[1]", style=solid]; -"1919 6042" -> "1920 6043" [label="[1, 1]", style=dashed]; -"1920 6043" -> "1921 6044" [label="[1, 1]", style=dashed]; -"1921 6044" -> "1922 6047" [label="[1]", style=dashed]; -"1921 6044" -> "1928 6055" [label="[1]", style=dashed]; -"1922 6047" -> "1924 6048" [label="[1]", style=dashed]; -"1923 6046" -> "1924 6048" [label="[1]", style=solid]; -"1924 6048" -> "1925 6057" [label="[1]", style=solid]; -"1924 6048" -> "1933 6068" [label="[1]", style=solid]; -"1925 6057" -> "1926 6058" [label="[1]", style=solid]; -"1926 6058" -> "1930 6061" [label="[1]", style=solid]; -"1927 6053" -> "1928 6055" [label="[1]", style=solid]; -"1928 6055" -> "1929 6056" [label="[1]", style=solid]; -"1928 6055" -> "3206 6066" [label="[1]", style=solid]; -"1929 6056" -> "1930 6061" [label="[1]", style=solid]; -"1930 6061" -> "1931 6063" [label="[1, 3]", style=dashed]; -"1931 6063" -> "1932 6064" [label="[1, 1]", style=dashed]; -"1932 6064" -> "1933 6068" [label="[1]", style=dashed]; -"1932 6064" -> "3205 6065" [label="[1]", style=dashed]; -"1933 6068" -> "3176 6520" [label="[1]", style=solid]; -"1934 5994" -> "1935 5996" [label="[1]", style=dashed]; -"1935 5996" -> "1936 5997" [label="[1]", style=dashed]; -"1936 5997" -> "1937 5998" [label="[1]", style=solid]; -"1937 5998" -> "1938 5999" [label="[1, 1]", style=dashed]; -"1938 5999" -> "1939 6000" [label="[1, 1]", style=dashed]; -"1939 6000" -> "1940 6003" [label="[1]", style=dashed]; -"1939 6000" -> "1946 6011" [label="[1]", style=dashed]; -"1940 6003" -> "1942 6004" [label="[1]", style=dashed]; -"1941 6002" -> "1942 6004" [label="[1]", style=solid]; -"1942 6004" -> "1943 6013" [label="[1]", style=solid]; -"1942 6004" -> "1951 6024" [label="[1]", style=solid]; -"1943 6013" -> "1944 6014" [label="[1]", style=solid]; -"1944 6014" -> "1948 6017" [label="[1]", style=solid]; -"1945 6009" -> "1946 6011" [label="[1]", style=solid]; -"1946 6011" -> "1947 6012" [label="[1]", style=solid]; -"1946 6011" -> "3208 6022" [label="[1]", style=solid]; -"1947 6012" -> "1948 6017" [label="[1]", style=solid]; -"1948 6017" -> "1949 6019" [label="[1, 3]", style=dashed]; -"1949 6019" -> "1950 6020" [label="[1, 1]", style=dashed]; -"1950 6020" -> "1951 6024" [label="[1]", style=dashed]; -"1950 6020" -> "3207 6021" [label="[1]", style=dashed]; -"1951 6024" -> "3176 6520" [label="[1]", style=solid]; -"1952 5950" -> "1953 5952" [label="[1]", style=dashed]; -"1953 5952" -> "1954 5953" [label="[1]", style=dashed]; -"1954 5953" -> "1955 5954" [label="[1]", style=solid]; -"1955 5954" -> "1956 5955" [label="[1, 1]", style=dashed]; -"1956 5955" -> "1957 5956" [label="[1, 1]", style=dashed]; -"1957 5956" -> "1958 5959" 
[label="[1]", style=dashed]; -"1957 5956" -> "1964 5967" [label="[1]", style=dashed]; -"1958 5959" -> "1960 5960" [label="[1]", style=dashed]; -"1959 5958" -> "1960 5960" [label="[1]", style=solid]; -"1960 5960" -> "1961 5969" [label="[1]", style=solid]; -"1960 5960" -> "1969 5980" [label="[1]", style=solid]; -"1961 5969" -> "1962 5970" [label="[1]", style=solid]; -"1962 5970" -> "1966 5973" [label="[1]", style=solid]; -"1963 5965" -> "1964 5967" [label="[1]", style=solid]; -"1964 5967" -> "1965 5968" [label="[1]", style=solid]; -"1964 5967" -> "3210 5978" [label="[1]", style=solid]; -"1965 5968" -> "1966 5973" [label="[1]", style=solid]; -"1966 5973" -> "1967 5975" [label="[1, 3]", style=dashed]; -"1967 5975" -> "1968 5976" [label="[1, 1]", style=dashed]; -"1968 5976" -> "1969 5980" [label="[1]", style=dashed]; -"1968 5976" -> "3209 5977" [label="[1]", style=dashed]; -"1969 5980" -> "3176 6520" [label="[1]", style=solid]; -"1970 5906" -> "1971 5908" [label="[1]", style=dashed]; -"1971 5908" -> "1972 5909" [label="[1]", style=dashed]; -"1972 5909" -> "1973 5910" [label="[1]", style=solid]; -"1973 5910" -> "1974 5911" [label="[1, 1]", style=dashed]; -"1974 5911" -> "1975 5912" [label="[1, 1]", style=dashed]; -"1975 5912" -> "1976 5915" [label="[1]", style=dashed]; -"1975 5912" -> "1982 5923" [label="[1]", style=dashed]; -"1976 5915" -> "1978 5916" [label="[1]", style=dashed]; -"1977 5914" -> "1978 5916" [label="[1]", style=solid]; -"1978 5916" -> "1979 5925" [label="[1]", style=solid]; -"1978 5916" -> "1987 5936" [label="[1]", style=solid]; -"1979 5925" -> "1980 5926" [label="[1]", style=solid]; -"1980 5926" -> "1984 5929" [label="[1]", style=solid]; -"1981 5921" -> "1982 5923" [label="[1]", style=solid]; -"1982 5923" -> "1983 5924" [label="[1]", style=solid]; -"1982 5923" -> "3212 5934" [label="[1]", style=solid]; -"1983 5924" -> "1984 5929" [label="[1]", style=solid]; -"1984 5929" -> "1985 5931" [label="[1, 3]", style=dashed]; -"1985 5931" -> "1986 5932" [label="[1, 1]", style=dashed]; -"1986 5932" -> "1987 5936" [label="[1]", style=dashed]; -"1986 5932" -> "3211 5933" [label="[1]", style=dashed]; -"1987 5936" -> "3176 6520" [label="[1]", style=solid]; -"1988 5862" -> "1989 5864" [label="[1]", style=dashed]; -"1989 5864" -> "1990 5865" [label="[1]", style=dashed]; -"1990 5865" -> "1991 5866" [label="[1]", style=solid]; -"1991 5866" -> "1992 5867" [label="[1, 1]", style=dashed]; -"1992 5867" -> "1993 5868" [label="[1, 1]", style=dashed]; -"1993 5868" -> "1994 5871" [label="[1]", style=dashed]; -"1993 5868" -> "2000 5879" [label="[1]", style=dashed]; -"1994 5871" -> "1996 5872" [label="[1]", style=dashed]; -"1995 5870" -> "1996 5872" [label="[1]", style=solid]; -"1996 5872" -> "1997 5881" [label="[1]", style=solid]; -"1996 5872" -> "2005 5892" [label="[1]", style=solid]; -"1997 5881" -> "1998 5882" [label="[1]", style=solid]; -"1998 5882" -> "2002 5885" [label="[1]", style=solid]; -"1999 5877" -> "2000 5879" [label="[1]", style=solid]; -"2000 5879" -> "2001 5880" [label="[1]", style=solid]; -"2000 5879" -> "3214 5890" [label="[1]", style=solid]; -"2001 5880" -> "2002 5885" [label="[1]", style=solid]; -"2002 5885" -> "2003 5887" [label="[1, 3]", style=dashed]; -"2003 5887" -> "2004 5888" [label="[1, 1]", style=dashed]; -"2004 5888" -> "2005 5892" [label="[1]", style=dashed]; -"2004 5888" -> "3213 5889" [label="[1]", style=dashed]; -"2005 5892" -> "3176 6520" [label="[1]", style=solid]; -"2006 5818" -> "2007 5820" [label="[1]", style=dashed]; -"2007 5820" -> "2008 5821" [label="[1]", 
style=dashed]; -"2008 5821" -> "2009 5822" [label="[1]", style=solid]; -"2009 5822" -> "2010 5823" [label="[1, 1]", style=dashed]; -"2010 5823" -> "2011 5824" [label="[1, 1]", style=dashed]; -"2011 5824" -> "2012 5827" [label="[1]", style=dashed]; -"2011 5824" -> "2018 5835" [label="[1]", style=dashed]; -"2012 5827" -> "2014 5828" [label="[1]", style=dashed]; -"2013 5826" -> "2014 5828" [label="[1]", style=solid]; -"2014 5828" -> "2015 5837" [label="[1]", style=solid]; -"2014 5828" -> "2023 5848" [label="[1]", style=solid]; -"2015 5837" -> "2016 5838" [label="[1]", style=solid]; -"2016 5838" -> "2020 5841" [label="[1]", style=solid]; -"2017 5833" -> "2018 5835" [label="[1]", style=solid]; -"2018 5835" -> "2019 5836" [label="[1]", style=solid]; -"2018 5835" -> "3216 5846" [label="[1]", style=solid]; -"2019 5836" -> "2020 5841" [label="[1]", style=solid]; -"2020 5841" -> "2021 5843" [label="[1, 3]", style=dashed]; -"2021 5843" -> "2022 5844" [label="[1, 1]", style=dashed]; -"2022 5844" -> "2023 5848" [label="[1]", style=dashed]; -"2022 5844" -> "3215 5845" [label="[1]", style=dashed]; -"2023 5848" -> "3176 6520" [label="[1]", style=solid]; -"2024 5774" -> "2025 5776" [label="[1]", style=dashed]; -"2025 5776" -> "2026 5777" [label="[1]", style=dashed]; -"2026 5777" -> "2027 5778" [label="[1]", style=solid]; -"2027 5778" -> "2028 5779" [label="[1, 1]", style=dashed]; -"2028 5779" -> "2029 5780" [label="[1, 1]", style=dashed]; -"2029 5780" -> "2030 5783" [label="[1]", style=dashed]; -"2029 5780" -> "2036 5791" [label="[1]", style=dashed]; -"2030 5783" -> "2032 5784" [label="[1]", style=dashed]; -"2031 5782" -> "2032 5784" [label="[1]", style=solid]; -"2032 5784" -> "2033 5793" [label="[1]", style=solid]; -"2032 5784" -> "2041 5804" [label="[1]", style=solid]; -"2033 5793" -> "2034 5794" [label="[1]", style=solid]; -"2034 5794" -> "2038 5797" [label="[1]", style=solid]; -"2035 5789" -> "2036 5791" [label="[1]", style=solid]; -"2036 5791" -> "2037 5792" [label="[1]", style=solid]; -"2036 5791" -> "3218 5802" [label="[1]", style=solid]; -"2037 5792" -> "2038 5797" [label="[1]", style=solid]; -"2038 5797" -> "2039 5799" [label="[1, 3]", style=dashed]; -"2039 5799" -> "2040 5800" [label="[1, 1]", style=dashed]; -"2040 5800" -> "2041 5804" [label="[1]", style=dashed]; -"2040 5800" -> "3217 5801" [label="[1]", style=dashed]; -"2041 5804" -> "3176 6520" [label="[1]", style=solid]; -"2042 5730" -> "2043 5732" [label="[1]", style=dashed]; -"2043 5732" -> "2044 5733" [label="[1]", style=dashed]; -"2044 5733" -> "2045 5734" [label="[1]", style=solid]; -"2045 5734" -> "2046 5735" [label="[1, 1]", style=dashed]; -"2046 5735" -> "2047 5736" [label="[1, 1]", style=dashed]; -"2047 5736" -> "2048 5739" [label="[1]", style=dashed]; -"2047 5736" -> "2054 5747" [label="[1]", style=dashed]; -"2048 5739" -> "2050 5740" [label="[1]", style=dashed]; -"2049 5738" -> "2050 5740" [label="[1]", style=solid]; -"2050 5740" -> "2051 5749" [label="[1]", style=solid]; -"2050 5740" -> "2059 5760" [label="[1]", style=solid]; -"2051 5749" -> "2052 5750" [label="[1]", style=solid]; -"2052 5750" -> "2056 5753" [label="[1]", style=solid]; -"2053 5745" -> "2054 5747" [label="[1]", style=solid]; -"2054 5747" -> "2055 5748" [label="[1]", style=solid]; -"2054 5747" -> "3220 5758" [label="[1]", style=solid]; -"2055 5748" -> "2056 5753" [label="[1]", style=solid]; -"2056 5753" -> "2057 5755" [label="[1, 3]", style=dashed]; -"2057 5755" -> "2058 5756" [label="[1, 1]", style=dashed]; -"2058 5756" -> "2059 5760" [label="[1]", style=dashed]; 
-"2058 5756" -> "3219 5757" [label="[1]", style=dashed]; -"2059 5760" -> "3176 6520" [label="[1]", style=solid]; -"2060 5686" -> "2061 5688" [label="[1]", style=dashed]; -"2061 5688" -> "2062 5689" [label="[1]", style=dashed]; -"2062 5689" -> "2063 5690" [label="[1]", style=solid]; -"2063 5690" -> "2064 5691" [label="[1, 1]", style=dashed]; -"2064 5691" -> "2065 5692" [label="[1, 1]", style=dashed]; -"2065 5692" -> "2066 5695" [label="[1]", style=dashed]; -"2065 5692" -> "2072 5703" [label="[1]", style=dashed]; -"2066 5695" -> "2068 5696" [label="[1]", style=dashed]; -"2067 5694" -> "2068 5696" [label="[1]", style=solid]; -"2068 5696" -> "2069 5705" [label="[1]", style=solid]; -"2068 5696" -> "2077 5716" [label="[1]", style=solid]; -"2069 5705" -> "2070 5706" [label="[1]", style=solid]; -"2070 5706" -> "2074 5709" [label="[1]", style=solid]; -"2071 5701" -> "2072 5703" [label="[1]", style=solid]; -"2072 5703" -> "2073 5704" [label="[1]", style=solid]; -"2072 5703" -> "3222 5714" [label="[1]", style=solid]; -"2073 5704" -> "2074 5709" [label="[1]", style=solid]; -"2074 5709" -> "2075 5711" [label="[1, 3]", style=dashed]; -"2075 5711" -> "2076 5712" [label="[1, 1]", style=dashed]; -"2076 5712" -> "2077 5716" [label="[1]", style=dashed]; -"2076 5712" -> "3221 5713" [label="[1]", style=dashed]; -"2077 5716" -> "3176 6520" [label="[1]", style=solid]; -"2078 5642" -> "2079 5644" [label="[1]", style=dashed]; -"2079 5644" -> "2080 5645" [label="[1]", style=dashed]; -"2080 5645" -> "2081 5646" [label="[1]", style=solid]; -"2081 5646" -> "2082 5647" [label="[1, 1]", style=dashed]; -"2082 5647" -> "2083 5648" [label="[1, 1]", style=dashed]; -"2083 5648" -> "2084 5651" [label="[1]", style=dashed]; -"2083 5648" -> "2090 5659" [label="[1]", style=dashed]; -"2084 5651" -> "2086 5652" [label="[1]", style=dashed]; -"2085 5650" -> "2086 5652" [label="[1]", style=solid]; -"2086 5652" -> "2087 5661" [label="[1]", style=solid]; -"2086 5652" -> "2095 5672" [label="[1]", style=solid]; -"2087 5661" -> "2088 5662" [label="[1]", style=solid]; -"2088 5662" -> "2092 5665" [label="[1]", style=solid]; -"2089 5657" -> "2090 5659" [label="[1]", style=solid]; -"2090 5659" -> "2091 5660" [label="[1]", style=solid]; -"2090 5659" -> "3224 5670" [label="[1]", style=solid]; -"2091 5660" -> "2092 5665" [label="[1]", style=solid]; -"2092 5665" -> "2093 5667" [label="[1, 3]", style=dashed]; -"2093 5667" -> "2094 5668" [label="[1, 1]", style=dashed]; -"2094 5668" -> "2095 5672" [label="[1]", style=dashed]; -"2094 5668" -> "3223 5669" [label="[1]", style=dashed]; -"2095 5672" -> "3176 6520" [label="[1]", style=solid]; -"2096 5598" -> "2097 5600" [label="[1]", style=dashed]; -"2097 5600" -> "2098 5601" [label="[1]", style=dashed]; -"2098 5601" -> "2099 5602" [label="[1]", style=solid]; -"2099 5602" -> "2100 5603" [label="[1, 1]", style=dashed]; -"2100 5603" -> "2101 5604" [label="[1, 1]", style=dashed]; -"2101 5604" -> "2102 5607" [label="[1]", style=dashed]; -"2101 5604" -> "2108 5615" [label="[1]", style=dashed]; -"2102 5607" -> "2104 5608" [label="[1]", style=dashed]; -"2103 5606" -> "2104 5608" [label="[1]", style=solid]; -"2104 5608" -> "2105 5617" [label="[1]", style=solid]; -"2104 5608" -> "2113 5628" [label="[1]", style=solid]; -"2105 5617" -> "2106 5618" [label="[1]", style=solid]; -"2106 5618" -> "2110 5621" [label="[1]", style=solid]; -"2107 5613" -> "2108 5615" [label="[1]", style=solid]; -"2108 5615" -> "2109 5616" [label="[1]", style=solid]; -"2108 5615" -> "3226 5626" [label="[1]", style=solid]; -"2109 5616" -> "2110 
5621" [label="[1]", style=solid]; -"2110 5621" -> "2111 5623" [label="[1, 3]", style=dashed]; -"2111 5623" -> "2112 5624" [label="[1, 1]", style=dashed]; -"2112 5624" -> "2113 5628" [label="[1]", style=dashed]; -"2112 5624" -> "3225 5625" [label="[1]", style=dashed]; -"2113 5628" -> "3176 6520" [label="[1]", style=solid]; -"2114 5554" -> "2115 5556" [label="[1]", style=dashed]; -"2115 5556" -> "2116 5557" [label="[1]", style=dashed]; -"2116 5557" -> "2117 5558" [label="[1]", style=solid]; -"2117 5558" -> "2118 5559" [label="[1, 1]", style=dashed]; -"2118 5559" -> "2119 5560" [label="[1, 1]", style=dashed]; -"2119 5560" -> "2120 5563" [label="[1]", style=dashed]; -"2119 5560" -> "2126 5571" [label="[1]", style=dashed]; -"2120 5563" -> "2122 5564" [label="[1]", style=dashed]; -"2121 5562" -> "2122 5564" [label="[1]", style=solid]; -"2122 5564" -> "2123 5573" [label="[1]", style=solid]; -"2122 5564" -> "2131 5584" [label="[1]", style=solid]; -"2123 5573" -> "2124 5574" [label="[1]", style=solid]; -"2124 5574" -> "2128 5577" [label="[1]", style=solid]; -"2125 5569" -> "2126 5571" [label="[1]", style=solid]; -"2126 5571" -> "2127 5572" [label="[1]", style=solid]; -"2126 5571" -> "3228 5582" [label="[1]", style=solid]; -"2127 5572" -> "2128 5577" [label="[1]", style=solid]; -"2128 5577" -> "2129 5579" [label="[1, 3]", style=dashed]; -"2129 5579" -> "2130 5580" [label="[1, 1]", style=dashed]; -"2130 5580" -> "2131 5584" [label="[1]", style=dashed]; -"2130 5580" -> "3227 5581" [label="[1]", style=dashed]; -"2131 5584" -> "3176 6520" [label="[1]", style=solid]; -"2132 5510" -> "2133 5512" [label="[1]", style=dashed]; -"2133 5512" -> "2134 5513" [label="[1]", style=dashed]; -"2134 5513" -> "2135 5514" [label="[1]", style=solid]; -"2135 5514" -> "2136 5515" [label="[1, 1]", style=dashed]; -"2136 5515" -> "2137 5516" [label="[1, 1]", style=dashed]; -"2137 5516" -> "2138 5519" [label="[1]", style=dashed]; -"2137 5516" -> "2144 5527" [label="[1]", style=dashed]; -"2138 5519" -> "2140 5520" [label="[1]", style=dashed]; -"2139 5518" -> "2140 5520" [label="[1]", style=solid]; -"2140 5520" -> "2141 5529" [label="[1]", style=solid]; -"2140 5520" -> "2149 5540" [label="[1]", style=solid]; -"2141 5529" -> "2142 5530" [label="[1]", style=solid]; -"2142 5530" -> "2146 5533" [label="[1]", style=solid]; -"2143 5525" -> "2144 5527" [label="[1]", style=solid]; -"2144 5527" -> "2145 5528" [label="[1]", style=solid]; -"2144 5527" -> "3230 5538" [label="[1]", style=solid]; -"2145 5528" -> "2146 5533" [label="[1]", style=solid]; -"2146 5533" -> "2147 5535" [label="[1, 3]", style=dashed]; -"2147 5535" -> "2148 5536" [label="[1, 1]", style=dashed]; -"2148 5536" -> "2149 5540" [label="[1]", style=dashed]; -"2148 5536" -> "3229 5537" [label="[1]", style=dashed]; -"2149 5540" -> "3176 6520" [label="[1]", style=solid]; -"2150 5466" -> "2151 5468" [label="[1]", style=dashed]; -"2151 5468" -> "2152 5469" [label="[1]", style=dashed]; -"2152 5469" -> "2153 5470" [label="[1]", style=solid]; -"2153 5470" -> "2154 5471" [label="[1, 1]", style=dashed]; -"2154 5471" -> "2155 5472" [label="[1, 1]", style=dashed]; -"2155 5472" -> "2156 5475" [label="[1]", style=dashed]; -"2155 5472" -> "2162 5483" [label="[1]", style=dashed]; -"2156 5475" -> "2158 5476" [label="[1]", style=dashed]; -"2157 5474" -> "2158 5476" [label="[1]", style=solid]; -"2158 5476" -> "2159 5485" [label="[1]", style=solid]; -"2158 5476" -> "2167 5496" [label="[1]", style=solid]; -"2159 5485" -> "2160 5486" [label="[1]", style=solid]; -"2160 5486" -> "2164 5489" 
[label="[1]", style=solid]; -"2161 5481" -> "2162 5483" [label="[1]", style=solid]; -"2162 5483" -> "2163 5484" [label="[1]", style=solid]; -"2162 5483" -> "3232 5494" [label="[1]", style=solid]; -"2163 5484" -> "2164 5489" [label="[1]", style=solid]; -"2164 5489" -> "2165 5491" [label="[1, 3]", style=dashed]; -"2165 5491" -> "2166 5492" [label="[1, 1]", style=dashed]; -"2166 5492" -> "2167 5496" [label="[1]", style=dashed]; -"2166 5492" -> "3231 5493" [label="[1]", style=dashed]; -"2167 5496" -> "3176 6520" [label="[1]", style=solid]; -"2168 5422" -> "2169 5424" [label="[1]", style=dashed]; -"2169 5424" -> "2170 5425" [label="[1]", style=dashed]; -"2170 5425" -> "2171 5426" [label="[1]", style=solid]; -"2171 5426" -> "2172 5427" [label="[1, 1]", style=dashed]; -"2172 5427" -> "2173 5428" [label="[1, 1]", style=dashed]; -"2173 5428" -> "2174 5431" [label="[1]", style=dashed]; -"2173 5428" -> "2180 5439" [label="[1]", style=dashed]; -"2174 5431" -> "2176 5432" [label="[1]", style=dashed]; -"2175 5430" -> "2176 5432" [label="[1]", style=solid]; -"2176 5432" -> "2177 5441" [label="[1]", style=solid]; -"2176 5432" -> "2185 5452" [label="[1]", style=solid]; -"2177 5441" -> "2178 5442" [label="[1]", style=solid]; -"2178 5442" -> "2182 5445" [label="[1]", style=solid]; -"2179 5437" -> "2180 5439" [label="[1]", style=solid]; -"2180 5439" -> "2181 5440" [label="[1]", style=solid]; -"2180 5439" -> "3234 5450" [label="[1]", style=solid]; -"2181 5440" -> "2182 5445" [label="[1]", style=solid]; -"2182 5445" -> "2183 5447" [label="[1, 3]", style=dashed]; -"2183 5447" -> "2184 5448" [label="[1, 1]", style=dashed]; -"2184 5448" -> "2185 5452" [label="[1]", style=dashed]; -"2184 5448" -> "3233 5449" [label="[1]", style=dashed]; -"2185 5452" -> "3176 6520" [label="[1]", style=solid]; -"2186 5378" -> "2187 5380" [label="[1]", style=dashed]; -"2187 5380" -> "2188 5381" [label="[1]", style=dashed]; -"2188 5381" -> "2189 5382" [label="[1]", style=solid]; -"2189 5382" -> "2190 5383" [label="[1, 1]", style=dashed]; -"2190 5383" -> "2191 5384" [label="[1, 1]", style=dashed]; -"2191 5384" -> "2192 5387" [label="[1]", style=dashed]; -"2191 5384" -> "2198 5395" [label="[1]", style=dashed]; -"2192 5387" -> "2194 5388" [label="[1]", style=dashed]; -"2193 5386" -> "2194 5388" [label="[1]", style=solid]; -"2194 5388" -> "2195 5397" [label="[1]", style=solid]; -"2194 5388" -> "2203 5408" [label="[1]", style=solid]; -"2195 5397" -> "2196 5398" [label="[1]", style=solid]; -"2196 5398" -> "2200 5401" [label="[1]", style=solid]; -"2197 5393" -> "2198 5395" [label="[1]", style=solid]; -"2198 5395" -> "2199 5396" [label="[1]", style=solid]; -"2198 5395" -> "3236 5406" [label="[1]", style=solid]; -"2199 5396" -> "2200 5401" [label="[1]", style=solid]; -"2200 5401" -> "2201 5403" [label="[1, 3]", style=dashed]; -"2201 5403" -> "2202 5404" [label="[1, 1]", style=dashed]; -"2202 5404" -> "2203 5408" [label="[1]", style=dashed]; -"2202 5404" -> "3235 5405" [label="[1]", style=dashed]; -"2203 5408" -> "3176 6520" [label="[1]", style=solid]; -"2204 5334" -> "2205 5336" [label="[1]", style=dashed]; -"2205 5336" -> "2206 5337" [label="[1]", style=dashed]; -"2206 5337" -> "2207 5338" [label="[1]", style=solid]; -"2207 5338" -> "2208 5339" [label="[1, 1]", style=dashed]; -"2208 5339" -> "2209 5340" [label="[1, 1]", style=dashed]; -"2209 5340" -> "2210 5343" [label="[1]", style=dashed]; -"2209 5340" -> "2216 5351" [label="[1]", style=dashed]; -"2210 5343" -> "2212 5344" [label="[1]", style=dashed]; -"2211 5342" -> "2212 5344" [label="[1]", 
style=solid]; -"2212 5344" -> "2213 5353" [label="[1]", style=solid]; -"2212 5344" -> "2221 5364" [label="[1]", style=solid]; -"2213 5353" -> "2214 5354" [label="[1]", style=solid]; -"2214 5354" -> "2218 5357" [label="[1]", style=solid]; -"2215 5349" -> "2216 5351" [label="[1]", style=solid]; -"2216 5351" -> "2217 5352" [label="[1]", style=solid]; -"2216 5351" -> "3238 5362" [label="[1]", style=solid]; -"2217 5352" -> "2218 5357" [label="[1]", style=solid]; -"2218 5357" -> "2219 5359" [label="[1, 3]", style=dashed]; -"2219 5359" -> "2220 5360" [label="[1, 1]", style=dashed]; -"2220 5360" -> "2221 5364" [label="[1]", style=dashed]; -"2220 5360" -> "3237 5361" [label="[1]", style=dashed]; -"2221 5364" -> "3176 6520" [label="[1]", style=solid]; -"2222 5290" -> "2223 5292" [label="[1]", style=dashed]; -"2223 5292" -> "2224 5293" [label="[1]", style=dashed]; -"2224 5293" -> "2225 5294" [label="[1]", style=solid]; -"2225 5294" -> "2226 5295" [label="[1, 1]", style=dashed]; -"2226 5295" -> "2227 5296" [label="[1, 1]", style=dashed]; -"2227 5296" -> "2228 5299" [label="[1]", style=dashed]; -"2227 5296" -> "2234 5307" [label="[1]", style=dashed]; -"2228 5299" -> "2230 5300" [label="[1]", style=dashed]; -"2229 5298" -> "2230 5300" [label="[1]", style=solid]; -"2230 5300" -> "2231 5309" [label="[1]", style=solid]; -"2230 5300" -> "2239 5320" [label="[1]", style=solid]; -"2231 5309" -> "2232 5310" [label="[1]", style=solid]; -"2232 5310" -> "2236 5313" [label="[1]", style=solid]; -"2233 5305" -> "2234 5307" [label="[1]", style=solid]; -"2234 5307" -> "2235 5308" [label="[1]", style=solid]; -"2234 5307" -> "3240 5318" [label="[1]", style=solid]; -"2235 5308" -> "2236 5313" [label="[1]", style=solid]; -"2236 5313" -> "2237 5315" [label="[1, 3]", style=dashed]; -"2237 5315" -> "2238 5316" [label="[1, 1]", style=dashed]; -"2238 5316" -> "2239 5320" [label="[1]", style=dashed]; -"2238 5316" -> "3239 5317" [label="[1]", style=dashed]; -"2239 5320" -> "3176 6520" [label="[1]", style=solid]; -"2240 5246" -> "2241 5248" [label="[1]", style=dashed]; -"2241 5248" -> "2242 5249" [label="[1]", style=dashed]; -"2242 5249" -> "2243 5250" [label="[1]", style=solid]; -"2243 5250" -> "2244 5251" [label="[1, 1]", style=dashed]; -"2244 5251" -> "2245 5252" [label="[1, 1]", style=dashed]; -"2245 5252" -> "2246 5255" [label="[1]", style=dashed]; -"2245 5252" -> "2252 5263" [label="[1]", style=dashed]; -"2246 5255" -> "2248 5256" [label="[1]", style=dashed]; -"2247 5254" -> "2248 5256" [label="[1]", style=solid]; -"2248 5256" -> "2249 5265" [label="[1]", style=solid]; -"2248 5256" -> "2257 5276" [label="[1]", style=solid]; -"2249 5265" -> "2250 5266" [label="[1]", style=solid]; -"2250 5266" -> "2254 5269" [label="[1]", style=solid]; -"2251 5261" -> "2252 5263" [label="[1]", style=solid]; -"2252 5263" -> "2253 5264" [label="[1]", style=solid]; -"2252 5263" -> "3242 5274" [label="[1]", style=solid]; -"2253 5264" -> "2254 5269" [label="[1]", style=solid]; -"2254 5269" -> "2255 5271" [label="[1, 3]", style=dashed]; -"2255 5271" -> "2256 5272" [label="[1, 1]", style=dashed]; -"2256 5272" -> "2257 5276" [label="[1]", style=dashed]; -"2256 5272" -> "3241 5273" [label="[1]", style=dashed]; -"2257 5276" -> "3176 6520" [label="[1]", style=solid]; -"2258 5202" -> "2259 5204" [label="[1]", style=dashed]; -"2259 5204" -> "2260 5205" [label="[1]", style=dashed]; -"2260 5205" -> "2261 5206" [label="[1]", style=solid]; -"2261 5206" -> "2262 5207" [label="[1, 1]", style=dashed]; -"2262 5207" -> "2263 5208" [label="[1, 1]", style=dashed]; 
-"2263 5208" -> "2264 5211" [label="[1]", style=dashed]; -"2263 5208" -> "2270 5219" [label="[1]", style=dashed]; -"2264 5211" -> "2266 5212" [label="[1]", style=dashed]; -"2265 5210" -> "2266 5212" [label="[1]", style=solid]; -"2266 5212" -> "2267 5221" [label="[1]", style=solid]; -"2266 5212" -> "2275 5232" [label="[1]", style=solid]; -"2267 5221" -> "2268 5222" [label="[1]", style=solid]; -"2268 5222" -> "2272 5225" [label="[1]", style=solid]; -"2269 5217" -> "2270 5219" [label="[1]", style=solid]; -"2270 5219" -> "2271 5220" [label="[1]", style=solid]; -"2270 5219" -> "3244 5230" [label="[1]", style=solid]; -"2271 5220" -> "2272 5225" [label="[1]", style=solid]; -"2272 5225" -> "2273 5227" [label="[1, 3]", style=dashed]; -"2273 5227" -> "2274 5228" [label="[1, 1]", style=dashed]; -"2274 5228" -> "2275 5232" [label="[1]", style=dashed]; -"2274 5228" -> "3243 5229" [label="[1]", style=dashed]; -"2275 5232" -> "3176 6520" [label="[1]", style=solid]; -"2276 5158" -> "2277 5160" [label="[1]", style=dashed]; -"2277 5160" -> "2278 5161" [label="[1]", style=dashed]; -"2278 5161" -> "2279 5162" [label="[1]", style=solid]; -"2279 5162" -> "2280 5163" [label="[1, 1]", style=dashed]; -"2280 5163" -> "2281 5164" [label="[1, 1]", style=dashed]; -"2281 5164" -> "2282 5167" [label="[1]", style=dashed]; -"2281 5164" -> "2288 5175" [label="[1]", style=dashed]; -"2282 5167" -> "2284 5168" [label="[1]", style=dashed]; -"2283 5166" -> "2284 5168" [label="[1]", style=solid]; -"2284 5168" -> "2285 5177" [label="[1]", style=solid]; -"2284 5168" -> "2293 5188" [label="[1]", style=solid]; -"2285 5177" -> "2286 5178" [label="[1]", style=solid]; -"2286 5178" -> "2290 5181" [label="[1]", style=solid]; -"2287 5173" -> "2288 5175" [label="[1]", style=solid]; -"2288 5175" -> "2289 5176" [label="[1]", style=solid]; -"2288 5175" -> "3246 5186" [label="[1]", style=solid]; -"2289 5176" -> "2290 5181" [label="[1]", style=solid]; -"2290 5181" -> "2291 5183" [label="[1, 3]", style=dashed]; -"2291 5183" -> "2292 5184" [label="[1, 1]", style=dashed]; -"2292 5184" -> "2293 5188" [label="[1]", style=dashed]; -"2292 5184" -> "3245 5185" [label="[1]", style=dashed]; -"2293 5188" -> "3176 6520" [label="[1]", style=solid]; -"2294 5114" -> "2295 5116" [label="[1]", style=dashed]; -"2295 5116" -> "2296 5117" [label="[1]", style=dashed]; -"2296 5117" -> "2297 5118" [label="[1]", style=solid]; -"2297 5118" -> "2298 5119" [label="[1, 1]", style=dashed]; -"2298 5119" -> "2299 5120" [label="[1, 1]", style=dashed]; -"2299 5120" -> "2300 5123" [label="[1]", style=dashed]; -"2299 5120" -> "2306 5131" [label="[1]", style=dashed]; -"2300 5123" -> "2302 5124" [label="[1]", style=dashed]; -"2301 5122" -> "2302 5124" [label="[1]", style=solid]; -"2302 5124" -> "2303 5133" [label="[1]", style=solid]; -"2302 5124" -> "2311 5144" [label="[1]", style=solid]; -"2303 5133" -> "2304 5134" [label="[1]", style=solid]; -"2304 5134" -> "2308 5137" [label="[1]", style=solid]; -"2305 5129" -> "2306 5131" [label="[1]", style=solid]; -"2306 5131" -> "2307 5132" [label="[1]", style=solid]; -"2306 5131" -> "3248 5142" [label="[1]", style=solid]; -"2307 5132" -> "2308 5137" [label="[1]", style=solid]; -"2308 5137" -> "2309 5139" [label="[1, 3]", style=dashed]; -"2309 5139" -> "2310 5140" [label="[1, 1]", style=dashed]; -"2310 5140" -> "2311 5144" [label="[1]", style=dashed]; -"2310 5140" -> "3247 5141" [label="[1]", style=dashed]; -"2311 5144" -> "3176 6520" [label="[1]", style=solid]; -"2312 5070" -> "2313 5072" [label="[1]", style=dashed]; -"2313 5072" -> "2314 
5073" [label="[1]", style=dashed]; -"2314 5073" -> "2315 5074" [label="[1]", style=solid]; -"2315 5074" -> "2316 5075" [label="[1, 1]", style=dashed]; -"2316 5075" -> "2317 5076" [label="[1, 1]", style=dashed]; -"2317 5076" -> "2318 5079" [label="[1]", style=dashed]; -"2317 5076" -> "2324 5087" [label="[1]", style=dashed]; -"2318 5079" -> "2320 5080" [label="[1]", style=dashed]; -"2319 5078" -> "2320 5080" [label="[1]", style=solid]; -"2320 5080" -> "2321 5089" [label="[1]", style=solid]; -"2320 5080" -> "2329 5100" [label="[1]", style=solid]; -"2321 5089" -> "2322 5090" [label="[1]", style=solid]; -"2322 5090" -> "2326 5093" [label="[1]", style=solid]; -"2323 5085" -> "2324 5087" [label="[1]", style=solid]; -"2324 5087" -> "2325 5088" [label="[1]", style=solid]; -"2324 5087" -> "3250 5098" [label="[1]", style=solid]; -"2325 5088" -> "2326 5093" [label="[1]", style=solid]; -"2326 5093" -> "2327 5095" [label="[1, 3]", style=dashed]; -"2327 5095" -> "2328 5096" [label="[1, 1]", style=dashed]; -"2328 5096" -> "2329 5100" [label="[1]", style=dashed]; -"2328 5096" -> "3249 5097" [label="[1]", style=dashed]; -"2329 5100" -> "3176 6520" [label="[1]", style=solid]; -"2330 5026" -> "2331 5028" [label="[1]", style=dashed]; -"2331 5028" -> "2332 5029" [label="[1]", style=dashed]; -"2332 5029" -> "2333 5030" [label="[1]", style=solid]; -"2333 5030" -> "2334 5031" [label="[1, 1]", style=dashed]; -"2334 5031" -> "2335 5032" [label="[1, 1]", style=dashed]; -"2335 5032" -> "2336 5035" [label="[1]", style=dashed]; -"2335 5032" -> "2342 5043" [label="[1]", style=dashed]; -"2336 5035" -> "2338 5036" [label="[1]", style=dashed]; -"2337 5034" -> "2338 5036" [label="[1]", style=solid]; -"2338 5036" -> "2339 5045" [label="[1]", style=solid]; -"2338 5036" -> "2347 5056" [label="[1]", style=solid]; -"2339 5045" -> "2340 5046" [label="[1]", style=solid]; -"2340 5046" -> "2344 5049" [label="[1]", style=solid]; -"2341 5041" -> "2342 5043" [label="[1]", style=solid]; -"2342 5043" -> "2343 5044" [label="[1]", style=solid]; -"2342 5043" -> "3252 5054" [label="[1]", style=solid]; -"2343 5044" -> "2344 5049" [label="[1]", style=solid]; -"2344 5049" -> "2345 5051" [label="[1, 3]", style=dashed]; -"2345 5051" -> "2346 5052" [label="[1, 1]", style=dashed]; -"2346 5052" -> "2347 5056" [label="[1]", style=dashed]; -"2346 5052" -> "3251 5053" [label="[1]", style=dashed]; -"2347 5056" -> "3176 6520" [label="[1]", style=solid]; -"2348 4982" -> "2349 4984" [label="[1]", style=dashed]; -"2349 4984" -> "2350 4985" [label="[1]", style=dashed]; -"2350 4985" -> "2351 4986" [label="[1]", style=solid]; -"2351 4986" -> "2352 4987" [label="[1, 1]", style=dashed]; -"2352 4987" -> "2353 4988" [label="[1, 1]", style=dashed]; -"2353 4988" -> "2354 4991" [label="[1]", style=dashed]; -"2353 4988" -> "2360 4999" [label="[1]", style=dashed]; -"2354 4991" -> "2356 4992" [label="[1]", style=dashed]; -"2355 4990" -> "2356 4992" [label="[1]", style=solid]; -"2356 4992" -> "2357 5001" [label="[1]", style=solid]; -"2356 4992" -> "2365 5012" [label="[1]", style=solid]; -"2357 5001" -> "2358 5002" [label="[1]", style=solid]; -"2358 5002" -> "2362 5005" [label="[1]", style=solid]; -"2359 4997" -> "2360 4999" [label="[1]", style=solid]; -"2360 4999" -> "2361 5000" [label="[1]", style=solid]; -"2360 4999" -> "3254 5010" [label="[1]", style=solid]; -"2361 5000" -> "2362 5005" [label="[1]", style=solid]; -"2362 5005" -> "2363 5007" [label="[1, 3]", style=dashed]; -"2363 5007" -> "2364 5008" [label="[1, 1]", style=dashed]; -"2364 5008" -> "2365 5012" 
[label="[1]", style=dashed]; -"2364 5008" -> "3253 5009" [label="[1]", style=dashed]; -"2365 5012" -> "3176 6520" [label="[1]", style=solid]; -"2366 4938" -> "2367 4940" [label="[1]", style=dashed]; -"2367 4940" -> "2368 4941" [label="[1]", style=dashed]; -"2368 4941" -> "2369 4942" [label="[1]", style=solid]; -"2369 4942" -> "2370 4943" [label="[1, 1]", style=dashed]; -"2370 4943" -> "2371 4944" [label="[1, 1]", style=dashed]; -"2371 4944" -> "2372 4947" [label="[1]", style=dashed]; -"2371 4944" -> "2378 4955" [label="[1]", style=dashed]; -"2372 4947" -> "2374 4948" [label="[1]", style=dashed]; -"2373 4946" -> "2374 4948" [label="[1]", style=solid]; -"2374 4948" -> "2375 4957" [label="[1]", style=solid]; -"2374 4948" -> "2383 4968" [label="[1]", style=solid]; -"2375 4957" -> "2376 4958" [label="[1]", style=solid]; -"2376 4958" -> "2380 4961" [label="[1]", style=solid]; -"2377 4953" -> "2378 4955" [label="[1]", style=solid]; -"2378 4955" -> "2379 4956" [label="[1]", style=solid]; -"2378 4955" -> "3256 4966" [label="[1]", style=solid]; -"2379 4956" -> "2380 4961" [label="[1]", style=solid]; -"2380 4961" -> "2381 4963" [label="[1, 3]", style=dashed]; -"2381 4963" -> "2382 4964" [label="[1, 1]", style=dashed]; -"2382 4964" -> "2383 4968" [label="[1]", style=dashed]; -"2382 4964" -> "3255 4965" [label="[1]", style=dashed]; -"2383 4968" -> "3176 6520" [label="[1]", style=solid]; -"2384 4894" -> "2385 4896" [label="[1]", style=dashed]; -"2385 4896" -> "2386 4897" [label="[1]", style=dashed]; -"2386 4897" -> "2387 4898" [label="[1]", style=solid]; -"2387 4898" -> "2388 4899" [label="[1, 1]", style=dashed]; -"2388 4899" -> "2389 4900" [label="[1, 1]", style=dashed]; -"2389 4900" -> "2390 4903" [label="[1]", style=dashed]; -"2389 4900" -> "2396 4911" [label="[1]", style=dashed]; -"2390 4903" -> "2392 4904" [label="[1]", style=dashed]; -"2391 4902" -> "2392 4904" [label="[1]", style=solid]; -"2392 4904" -> "2393 4913" [label="[1]", style=solid]; -"2392 4904" -> "2401 4924" [label="[1]", style=solid]; -"2393 4913" -> "2394 4914" [label="[1]", style=solid]; -"2394 4914" -> "2398 4917" [label="[1]", style=solid]; -"2395 4909" -> "2396 4911" [label="[1]", style=solid]; -"2396 4911" -> "2397 4912" [label="[1]", style=solid]; -"2396 4911" -> "3258 4922" [label="[1]", style=solid]; -"2397 4912" -> "2398 4917" [label="[1]", style=solid]; -"2398 4917" -> "2399 4919" [label="[1, 3]", style=dashed]; -"2399 4919" -> "2400 4920" [label="[1, 1]", style=dashed]; -"2400 4920" -> "2401 4924" [label="[1]", style=dashed]; -"2400 4920" -> "3257 4921" [label="[1]", style=dashed]; -"2401 4924" -> "3176 6520" [label="[1]", style=solid]; -"2402 4850" -> "2403 4852" [label="[1]", style=dashed]; -"2403 4852" -> "2404 4853" [label="[1]", style=dashed]; -"2404 4853" -> "2405 4854" [label="[1]", style=solid]; -"2405 4854" -> "2406 4855" [label="[1, 1]", style=dashed]; -"2406 4855" -> "2407 4856" [label="[1, 1]", style=dashed]; -"2407 4856" -> "2408 4859" [label="[1]", style=dashed]; -"2407 4856" -> "2414 4867" [label="[1]", style=dashed]; -"2408 4859" -> "2410 4860" [label="[1]", style=dashed]; -"2409 4858" -> "2410 4860" [label="[1]", style=solid]; -"2410 4860" -> "2411 4869" [label="[1]", style=solid]; -"2410 4860" -> "2419 4880" [label="[1]", style=solid]; -"2411 4869" -> "2412 4870" [label="[1]", style=solid]; -"2412 4870" -> "2416 4873" [label="[1]", style=solid]; -"2413 4865" -> "2414 4867" [label="[1]", style=solid]; -"2414 4867" -> "2415 4868" [label="[1]", style=solid]; -"2414 4867" -> "3260 4878" [label="[1]", 
style=solid]; -"2415 4868" -> "2416 4873" [label="[1]", style=solid]; -"2416 4873" -> "2417 4875" [label="[1, 3]", style=dashed]; -"2417 4875" -> "2418 4876" [label="[1, 1]", style=dashed]; -"2418 4876" -> "2419 4880" [label="[1]", style=dashed]; -"2418 4876" -> "3259 4877" [label="[1]", style=dashed]; -"2419 4880" -> "3176 6520" [label="[1]", style=solid]; -"2420 4806" -> "2421 4808" [label="[1]", style=dashed]; -"2421 4808" -> "2422 4809" [label="[1]", style=dashed]; -"2422 4809" -> "2423 4810" [label="[1]", style=solid]; -"2423 4810" -> "2424 4811" [label="[1, 1]", style=dashed]; -"2424 4811" -> "2425 4812" [label="[1, 1]", style=dashed]; -"2425 4812" -> "2426 4815" [label="[1]", style=dashed]; -"2425 4812" -> "2432 4823" [label="[1]", style=dashed]; -"2426 4815" -> "2428 4816" [label="[1]", style=dashed]; -"2427 4814" -> "2428 4816" [label="[1]", style=solid]; -"2428 4816" -> "2429 4825" [label="[1]", style=solid]; -"2428 4816" -> "2437 4836" [label="[1]", style=solid]; -"2429 4825" -> "2430 4826" [label="[1]", style=solid]; -"2430 4826" -> "2434 4829" [label="[1]", style=solid]; -"2431 4821" -> "2432 4823" [label="[1]", style=solid]; -"2432 4823" -> "2433 4824" [label="[1]", style=solid]; -"2432 4823" -> "3262 4834" [label="[1]", style=solid]; -"2433 4824" -> "2434 4829" [label="[1]", style=solid]; -"2434 4829" -> "2435 4831" [label="[1, 3]", style=dashed]; -"2435 4831" -> "2436 4832" [label="[1, 1]", style=dashed]; -"2436 4832" -> "2437 4836" [label="[1]", style=dashed]; -"2436 4832" -> "3261 4833" [label="[1]", style=dashed]; -"2437 4836" -> "3176 6520" [label="[1]", style=solid]; -"2438 4762" -> "2439 4764" [label="[1]", style=dashed]; -"2439 4764" -> "2440 4765" [label="[1]", style=dashed]; -"2440 4765" -> "2441 4766" [label="[1]", style=solid]; -"2441 4766" -> "2442 4767" [label="[1, 1]", style=dashed]; -"2442 4767" -> "2443 4768" [label="[1, 1]", style=dashed]; -"2443 4768" -> "2444 4771" [label="[1]", style=dashed]; -"2443 4768" -> "2450 4779" [label="[1]", style=dashed]; -"2444 4771" -> "2446 4772" [label="[1]", style=dashed]; -"2445 4770" -> "2446 4772" [label="[1]", style=solid]; -"2446 4772" -> "2447 4781" [label="[1]", style=solid]; -"2446 4772" -> "2455 4792" [label="[1]", style=solid]; -"2447 4781" -> "2448 4782" [label="[1]", style=solid]; -"2448 4782" -> "2452 4785" [label="[1]", style=solid]; -"2449 4777" -> "2450 4779" [label="[1]", style=solid]; -"2450 4779" -> "2451 4780" [label="[1]", style=solid]; -"2450 4779" -> "3264 4790" [label="[1]", style=solid]; -"2451 4780" -> "2452 4785" [label="[1]", style=solid]; -"2452 4785" -> "2453 4787" [label="[1, 3]", style=dashed]; -"2453 4787" -> "2454 4788" [label="[1, 1]", style=dashed]; -"2454 4788" -> "2455 4792" [label="[1]", style=dashed]; -"2454 4788" -> "3263 4789" [label="[1]", style=dashed]; -"2455 4792" -> "3176 6520" [label="[1]", style=solid]; -"2456 4718" -> "2457 4720" [label="[1]", style=dashed]; -"2457 4720" -> "2458 4721" [label="[1]", style=dashed]; -"2458 4721" -> "2459 4722" [label="[1]", style=solid]; -"2459 4722" -> "2460 4723" [label="[1, 1]", style=dashed]; -"2460 4723" -> "2461 4724" [label="[1, 1]", style=dashed]; -"2461 4724" -> "2462 4727" [label="[1]", style=dashed]; -"2461 4724" -> "2468 4735" [label="[1]", style=dashed]; -"2462 4727" -> "2464 4728" [label="[1]", style=dashed]; -"2463 4726" -> "2464 4728" [label="[1]", style=solid]; -"2464 4728" -> "2465 4737" [label="[1]", style=solid]; -"2464 4728" -> "2473 4748" [label="[1]", style=solid]; -"2465 4737" -> "2466 4738" [label="[1]", style=solid]; 
-"2466 4738" -> "2470 4741" [label="[1]", style=solid]; -"2467 4733" -> "2468 4735" [label="[1]", style=solid]; -"2468 4735" -> "2469 4736" [label="[1]", style=solid]; -"2468 4735" -> "3266 4746" [label="[1]", style=solid]; -"2469 4736" -> "2470 4741" [label="[1]", style=solid]; -"2470 4741" -> "2471 4743" [label="[1, 3]", style=dashed]; -"2471 4743" -> "2472 4744" [label="[1, 1]", style=dashed]; -"2472 4744" -> "2473 4748" [label="[1]", style=dashed]; -"2472 4744" -> "3265 4745" [label="[1]", style=dashed]; -"2473 4748" -> "3176 6520" [label="[1]", style=solid]; -"2474 4674" -> "2475 4676" [label="[1]", style=dashed]; -"2475 4676" -> "2476 4677" [label="[1]", style=dashed]; -"2476 4677" -> "2477 4678" [label="[1]", style=solid]; -"2477 4678" -> "2478 4679" [label="[1, 1]", style=dashed]; -"2478 4679" -> "2479 4680" [label="[1, 1]", style=dashed]; -"2479 4680" -> "2480 4683" [label="[1]", style=dashed]; -"2479 4680" -> "2486 4691" [label="[1]", style=dashed]; -"2480 4683" -> "2482 4684" [label="[1]", style=dashed]; -"2481 4682" -> "2482 4684" [label="[1]", style=solid]; -"2482 4684" -> "2483 4693" [label="[1]", style=solid]; -"2482 4684" -> "2491 4704" [label="[1]", style=solid]; -"2483 4693" -> "2484 4694" [label="[1]", style=solid]; -"2484 4694" -> "2488 4697" [label="[1]", style=solid]; -"2485 4689" -> "2486 4691" [label="[1]", style=solid]; -"2486 4691" -> "2487 4692" [label="[1]", style=solid]; -"2486 4691" -> "3268 4702" [label="[1]", style=solid]; -"2487 4692" -> "2488 4697" [label="[1]", style=solid]; -"2488 4697" -> "2489 4699" [label="[1, 3]", style=dashed]; -"2489 4699" -> "2490 4700" [label="[1, 1]", style=dashed]; -"2490 4700" -> "2491 4704" [label="[1]", style=dashed]; -"2490 4700" -> "3267 4701" [label="[1]", style=dashed]; -"2491 4704" -> "3176 6520" [label="[1]", style=solid]; -"2492 4630" -> "2493 4632" [label="[1]", style=dashed]; -"2493 4632" -> "2494 4633" [label="[1]", style=dashed]; -"2494 4633" -> "2495 4634" [label="[1]", style=solid]; -"2495 4634" -> "2496 4635" [label="[1, 1]", style=dashed]; -"2496 4635" -> "2497 4636" [label="[1, 1]", style=dashed]; -"2497 4636" -> "2498 4639" [label="[1]", style=dashed]; -"2497 4636" -> "2504 4647" [label="[1]", style=dashed]; -"2498 4639" -> "2500 4640" [label="[1]", style=dashed]; -"2499 4638" -> "2500 4640" [label="[1]", style=solid]; -"2500 4640" -> "2501 4649" [label="[1]", style=solid]; -"2500 4640" -> "2509 4660" [label="[1]", style=solid]; -"2501 4649" -> "2502 4650" [label="[1]", style=solid]; -"2502 4650" -> "2506 4653" [label="[1]", style=solid]; -"2503 4645" -> "2504 4647" [label="[1]", style=solid]; -"2504 4647" -> "2505 4648" [label="[1]", style=solid]; -"2504 4647" -> "3270 4658" [label="[1]", style=solid]; -"2505 4648" -> "2506 4653" [label="[1]", style=solid]; -"2506 4653" -> "2507 4655" [label="[1, 3]", style=dashed]; -"2507 4655" -> "2508 4656" [label="[1, 1]", style=dashed]; -"2508 4656" -> "2509 4660" [label="[1]", style=dashed]; -"2508 4656" -> "3269 4657" [label="[1]", style=dashed]; -"2509 4660" -> "3176 6520" [label="[1]", style=solid]; -"2510 4586" -> "2511 4588" [label="[1]", style=dashed]; -"2511 4588" -> "2512 4589" [label="[1]", style=dashed]; -"2512 4589" -> "2513 4590" [label="[1]", style=solid]; -"2513 4590" -> "2514 4591" [label="[1, 1]", style=dashed]; -"2514 4591" -> "2515 4592" [label="[1, 1]", style=dashed]; -"2515 4592" -> "2516 4595" [label="[1]", style=dashed]; -"2515 4592" -> "2522 4603" [label="[1]", style=dashed]; -"2516 4595" -> "2518 4596" [label="[1]", style=dashed]; -"2517 4594" 
-> "2518 4596" [label="[1]", style=solid]; -"2518 4596" -> "2519 4605" [label="[1]", style=solid]; -"2518 4596" -> "2527 4616" [label="[1]", style=solid]; -"2519 4605" -> "2520 4606" [label="[1]", style=solid]; -"2520 4606" -> "2524 4609" [label="[1]", style=solid]; -"2521 4601" -> "2522 4603" [label="[1]", style=solid]; -"2522 4603" -> "2523 4604" [label="[1]", style=solid]; -"2522 4603" -> "3272 4614" [label="[1]", style=solid]; -"2523 4604" -> "2524 4609" [label="[1]", style=solid]; -"2524 4609" -> "2525 4611" [label="[1, 3]", style=dashed]; -"2525 4611" -> "2526 4612" [label="[1, 1]", style=dashed]; -"2526 4612" -> "2527 4616" [label="[1]", style=dashed]; -"2526 4612" -> "3271 4613" [label="[1]", style=dashed]; -"2527 4616" -> "3176 6520" [label="[1]", style=solid]; -"2528 4542" -> "2529 4544" [label="[1]", style=dashed]; -"2529 4544" -> "2530 4545" [label="[1]", style=dashed]; -"2530 4545" -> "2531 4546" [label="[1]", style=solid]; -"2531 4546" -> "2532 4547" [label="[1, 1]", style=dashed]; -"2532 4547" -> "2533 4548" [label="[1, 1]", style=dashed]; -"2533 4548" -> "2534 4551" [label="[1]", style=dashed]; -"2533 4548" -> "2540 4559" [label="[1]", style=dashed]; -"2534 4551" -> "2536 4552" [label="[1]", style=dashed]; -"2535 4550" -> "2536 4552" [label="[1]", style=solid]; -"2536 4552" -> "2537 4561" [label="[1]", style=solid]; -"2536 4552" -> "2545 4572" [label="[1]", style=solid]; -"2537 4561" -> "2538 4562" [label="[1]", style=solid]; -"2538 4562" -> "2542 4565" [label="[1]", style=solid]; -"2539 4557" -> "2540 4559" [label="[1]", style=solid]; -"2540 4559" -> "2541 4560" [label="[1]", style=solid]; -"2540 4559" -> "3274 4570" [label="[1]", style=solid]; -"2541 4560" -> "2542 4565" [label="[1]", style=solid]; -"2542 4565" -> "2543 4567" [label="[1, 3]", style=dashed]; -"2543 4567" -> "2544 4568" [label="[1, 1]", style=dashed]; -"2544 4568" -> "2545 4572" [label="[1]", style=dashed]; -"2544 4568" -> "3273 4569" [label="[1]", style=dashed]; -"2545 4572" -> "3176 6520" [label="[1]", style=solid]; -"2546 4498" -> "2547 4500" [label="[1]", style=dashed]; -"2547 4500" -> "2548 4501" [label="[1]", style=dashed]; -"2548 4501" -> "2549 4502" [label="[1]", style=solid]; -"2549 4502" -> "2550 4503" [label="[1, 1]", style=dashed]; -"2550 4503" -> "2551 4504" [label="[1, 1]", style=dashed]; -"2551 4504" -> "2552 4507" [label="[1]", style=dashed]; -"2551 4504" -> "2558 4515" [label="[1]", style=dashed]; -"2552 4507" -> "2554 4508" [label="[1]", style=dashed]; -"2553 4506" -> "2554 4508" [label="[1]", style=solid]; -"2554 4508" -> "2555 4517" [label="[1]", style=solid]; -"2554 4508" -> "2563 4528" [label="[1]", style=solid]; -"2555 4517" -> "2556 4518" [label="[1]", style=solid]; -"2556 4518" -> "2560 4521" [label="[1]", style=solid]; -"2557 4513" -> "2558 4515" [label="[1]", style=solid]; -"2558 4515" -> "2559 4516" [label="[1]", style=solid]; -"2558 4515" -> "3276 4526" [label="[1]", style=solid]; -"2559 4516" -> "2560 4521" [label="[1]", style=solid]; -"2560 4521" -> "2561 4523" [label="[1, 3]", style=dashed]; -"2561 4523" -> "2562 4524" [label="[1, 1]", style=dashed]; -"2562 4524" -> "2563 4528" [label="[1]", style=dashed]; -"2562 4524" -> "3275 4525" [label="[1]", style=dashed]; -"2563 4528" -> "3176 6520" [label="[1]", style=solid]; -"2564 4454" -> "2565 4456" [label="[1]", style=dashed]; -"2565 4456" -> "2566 4457" [label="[1]", style=dashed]; -"2566 4457" -> "2567 4458" [label="[1]", style=solid]; -"2567 4458" -> "2568 4459" [label="[1, 1]", style=dashed]; -"2568 4459" -> "2569 4460" 
[label="[1, 1]", style=dashed]; -"2569 4460" -> "2570 4463" [label="[1]", style=dashed]; -"2569 4460" -> "2576 4471" [label="[1]", style=dashed]; -"2570 4463" -> "2572 4464" [label="[1]", style=dashed]; -"2571 4462" -> "2572 4464" [label="[1]", style=solid]; -"2572 4464" -> "2573 4473" [label="[1]", style=solid]; -"2572 4464" -> "2581 4484" [label="[1]", style=solid]; -"2573 4473" -> "2574 4474" [label="[1]", style=solid]; -"2574 4474" -> "2578 4477" [label="[1]", style=solid]; -"2575 4469" -> "2576 4471" [label="[1]", style=solid]; -"2576 4471" -> "2577 4472" [label="[1]", style=solid]; -"2576 4471" -> "3278 4482" [label="[1]", style=solid]; -"2577 4472" -> "2578 4477" [label="[1]", style=solid]; -"2578 4477" -> "2579 4479" [label="[1, 3]", style=dashed]; -"2579 4479" -> "2580 4480" [label="[1, 1]", style=dashed]; -"2580 4480" -> "2581 4484" [label="[1]", style=dashed]; -"2580 4480" -> "3277 4481" [label="[1]", style=dashed]; -"2581 4484" -> "3176 6520" [label="[1]", style=solid]; -"2582 4410" -> "2583 4412" [label="[1]", style=dashed]; -"2583 4412" -> "2584 4413" [label="[1]", style=dashed]; -"2584 4413" -> "2585 4414" [label="[1]", style=solid]; -"2585 4414" -> "2586 4415" [label="[1, 1]", style=dashed]; -"2586 4415" -> "2587 4416" [label="[1, 1]", style=dashed]; -"2587 4416" -> "2588 4419" [label="[1]", style=dashed]; -"2587 4416" -> "2594 4427" [label="[1]", style=dashed]; -"2588 4419" -> "2590 4420" [label="[1]", style=dashed]; -"2589 4418" -> "2590 4420" [label="[1]", style=solid]; -"2590 4420" -> "2591 4429" [label="[1]", style=solid]; -"2590 4420" -> "2599 4440" [label="[1]", style=solid]; -"2591 4429" -> "2592 4430" [label="[1]", style=solid]; -"2592 4430" -> "2596 4433" [label="[1]", style=solid]; -"2593 4425" -> "2594 4427" [label="[1]", style=solid]; -"2594 4427" -> "2595 4428" [label="[1]", style=solid]; -"2594 4427" -> "3280 4438" [label="[1]", style=solid]; -"2595 4428" -> "2596 4433" [label="[1]", style=solid]; -"2596 4433" -> "2597 4435" [label="[1, 3]", style=dashed]; -"2597 4435" -> "2598 4436" [label="[1, 1]", style=dashed]; -"2598 4436" -> "2599 4440" [label="[1]", style=dashed]; -"2598 4436" -> "3279 4437" [label="[1]", style=dashed]; -"2599 4440" -> "3176 6520" [label="[1]", style=solid]; -"2600 4366" -> "2601 4368" [label="[1]", style=dashed]; -"2601 4368" -> "2602 4369" [label="[1]", style=dashed]; -"2602 4369" -> "2603 4370" [label="[1]", style=solid]; -"2603 4370" -> "2604 4371" [label="[1, 1]", style=dashed]; -"2604 4371" -> "2605 4372" [label="[1, 1]", style=dashed]; -"2605 4372" -> "2606 4375" [label="[1]", style=dashed]; -"2605 4372" -> "2612 4383" [label="[1]", style=dashed]; -"2606 4375" -> "2608 4376" [label="[1]", style=dashed]; -"2607 4374" -> "2608 4376" [label="[1]", style=solid]; -"2608 4376" -> "2609 4385" [label="[1]", style=solid]; -"2608 4376" -> "2617 4396" [label="[1]", style=solid]; -"2609 4385" -> "2610 4386" [label="[1]", style=solid]; -"2610 4386" -> "2614 4389" [label="[1]", style=solid]; -"2611 4381" -> "2612 4383" [label="[1]", style=solid]; -"2612 4383" -> "2613 4384" [label="[1]", style=solid]; -"2612 4383" -> "3282 4394" [label="[1]", style=solid]; -"2613 4384" -> "2614 4389" [label="[1]", style=solid]; -"2614 4389" -> "2615 4391" [label="[1, 3]", style=dashed]; -"2615 4391" -> "2616 4392" [label="[1, 1]", style=dashed]; -"2616 4392" -> "2617 4396" [label="[1]", style=dashed]; -"2616 4392" -> "3281 4393" [label="[1]", style=dashed]; -"2617 4396" -> "3176 6520" [label="[1]", style=solid]; -"2618 4322" -> "2619 4324" [label="[1]", 
style=dashed]; -"2619 4324" -> "2620 4325" [label="[1]", style=dashed]; -"2620 4325" -> "2621 4326" [label="[1]", style=solid]; -"2621 4326" -> "2622 4327" [label="[1, 1]", style=dashed]; -"2622 4327" -> "2623 4328" [label="[1, 1]", style=dashed]; -"2623 4328" -> "2624 4331" [label="[1]", style=dashed]; -"2623 4328" -> "2630 4339" [label="[1]", style=dashed]; -"2624 4331" -> "2626 4332" [label="[1]", style=dashed]; -"2625 4330" -> "2626 4332" [label="[1]", style=solid]; -"2626 4332" -> "2627 4341" [label="[1]", style=solid]; -"2626 4332" -> "2635 4352" [label="[1]", style=solid]; -"2627 4341" -> "2628 4342" [label="[1]", style=solid]; -"2628 4342" -> "2632 4345" [label="[1]", style=solid]; -"2629 4337" -> "2630 4339" [label="[1]", style=solid]; -"2630 4339" -> "2631 4340" [label="[1]", style=solid]; -"2630 4339" -> "3284 4350" [label="[1]", style=solid]; -"2631 4340" -> "2632 4345" [label="[1]", style=solid]; -"2632 4345" -> "2633 4347" [label="[1, 3]", style=dashed]; -"2633 4347" -> "2634 4348" [label="[1, 1]", style=dashed]; -"2634 4348" -> "2635 4352" [label="[1]", style=dashed]; -"2634 4348" -> "3283 4349" [label="[1]", style=dashed]; -"2635 4352" -> "3176 6520" [label="[1]", style=solid]; -"2636 4278" -> "2637 4280" [label="[1]", style=dashed]; -"2637 4280" -> "2638 4281" [label="[1]", style=dashed]; -"2638 4281" -> "2639 4282" [label="[1]", style=solid]; -"2639 4282" -> "2640 4283" [label="[1, 1]", style=dashed]; -"2640 4283" -> "2641 4284" [label="[1, 1]", style=dashed]; -"2641 4284" -> "2642 4287" [label="[1]", style=dashed]; -"2641 4284" -> "2648 4295" [label="[1]", style=dashed]; -"2642 4287" -> "2644 4288" [label="[1]", style=dashed]; -"2643 4286" -> "2644 4288" [label="[1]", style=solid]; -"2644 4288" -> "2645 4297" [label="[1]", style=solid]; -"2644 4288" -> "2653 4308" [label="[1]", style=solid]; -"2645 4297" -> "2646 4298" [label="[1]", style=solid]; -"2646 4298" -> "2650 4301" [label="[1]", style=solid]; -"2647 4293" -> "2648 4295" [label="[1]", style=solid]; -"2648 4295" -> "2649 4296" [label="[1]", style=solid]; -"2648 4295" -> "3286 4306" [label="[1]", style=solid]; -"2649 4296" -> "2650 4301" [label="[1]", style=solid]; -"2650 4301" -> "2651 4303" [label="[1, 3]", style=dashed]; -"2651 4303" -> "2652 4304" [label="[1, 1]", style=dashed]; -"2652 4304" -> "2653 4308" [label="[1]", style=dashed]; -"2652 4304" -> "3285 4305" [label="[1]", style=dashed]; -"2653 4308" -> "3176 6520" [label="[1]", style=solid]; -"2654 4234" -> "2655 4236" [label="[1]", style=dashed]; -"2655 4236" -> "2656 4237" [label="[1]", style=dashed]; -"2656 4237" -> "2657 4238" [label="[1]", style=solid]; -"2657 4238" -> "2658 4239" [label="[1, 1]", style=dashed]; -"2658 4239" -> "2659 4240" [label="[1, 1]", style=dashed]; -"2659 4240" -> "2660 4243" [label="[1]", style=dashed]; -"2659 4240" -> "2666 4251" [label="[1]", style=dashed]; -"2660 4243" -> "2662 4244" [label="[1]", style=dashed]; -"2661 4242" -> "2662 4244" [label="[1]", style=solid]; -"2662 4244" -> "2663 4253" [label="[1]", style=solid]; -"2662 4244" -> "2671 4264" [label="[1]", style=solid]; -"2663 4253" -> "2664 4254" [label="[1]", style=solid]; -"2664 4254" -> "2668 4257" [label="[1]", style=solid]; -"2665 4249" -> "2666 4251" [label="[1]", style=solid]; -"2666 4251" -> "2667 4252" [label="[1]", style=solid]; -"2666 4251" -> "3288 4262" [label="[1]", style=solid]; -"2667 4252" -> "2668 4257" [label="[1]", style=solid]; -"2668 4257" -> "2669 4259" [label="[1, 3]", style=dashed]; -"2669 4259" -> "2670 4260" [label="[1, 1]", style=dashed]; 
-"2670 4260" -> "2671 4264" [label="[1]", style=dashed]; -"2670 4260" -> "3287 4261" [label="[1]", style=dashed]; -"2671 4264" -> "3176 6520" [label="[1]", style=solid]; -"2672 4190" -> "2673 4192" [label="[1]", style=dashed]; -"2673 4192" -> "2674 4193" [label="[1]", style=dashed]; -"2674 4193" -> "2675 4194" [label="[1]", style=solid]; -"2675 4194" -> "2676 4195" [label="[1, 1]", style=dashed]; -"2676 4195" -> "2677 4196" [label="[1, 1]", style=dashed]; -"2677 4196" -> "2678 4199" [label="[1]", style=dashed]; -"2677 4196" -> "2684 4207" [label="[1]", style=dashed]; -"2678 4199" -> "2680 4200" [label="[1]", style=dashed]; -"2679 4198" -> "2680 4200" [label="[1]", style=solid]; -"2680 4200" -> "2681 4209" [label="[1]", style=solid]; -"2680 4200" -> "2689 4220" [label="[1]", style=solid]; -"2681 4209" -> "2682 4210" [label="[1]", style=solid]; -"2682 4210" -> "2686 4213" [label="[1]", style=solid]; -"2683 4205" -> "2684 4207" [label="[1]", style=solid]; -"2684 4207" -> "2685 4208" [label="[1]", style=solid]; -"2684 4207" -> "3290 4218" [label="[1]", style=solid]; -"2685 4208" -> "2686 4213" [label="[1]", style=solid]; -"2686 4213" -> "2687 4215" [label="[1, 3]", style=dashed]; -"2687 4215" -> "2688 4216" [label="[1, 1]", style=dashed]; -"2688 4216" -> "2689 4220" [label="[1]", style=dashed]; -"2688 4216" -> "3289 4217" [label="[1]", style=dashed]; -"2689 4220" -> "3176 6520" [label="[1]", style=solid]; -"2690 4146" -> "2691 4148" [label="[1]", style=dashed]; -"2691 4148" -> "2692 4149" [label="[1]", style=dashed]; -"2692 4149" -> "2693 4150" [label="[1]", style=solid]; -"2693 4150" -> "2694 4151" [label="[1, 1]", style=dashed]; -"2694 4151" -> "2695 4152" [label="[1, 1]", style=dashed]; -"2695 4152" -> "2696 4155" [label="[1]", style=dashed]; -"2695 4152" -> "2702 4163" [label="[1]", style=dashed]; -"2696 4155" -> "2698 4156" [label="[1]", style=dashed]; -"2697 4154" -> "2698 4156" [label="[1]", style=solid]; -"2698 4156" -> "2699 4165" [label="[1]", style=solid]; -"2698 4156" -> "2707 4176" [label="[1]", style=solid]; -"2699 4165" -> "2700 4166" [label="[1]", style=solid]; -"2700 4166" -> "2704 4169" [label="[1]", style=solid]; -"2701 4161" -> "2702 4163" [label="[1]", style=solid]; -"2702 4163" -> "2703 4164" [label="[1]", style=solid]; -"2702 4163" -> "3292 4174" [label="[1]", style=solid]; -"2703 4164" -> "2704 4169" [label="[1]", style=solid]; -"2704 4169" -> "2705 4171" [label="[1, 3]", style=dashed]; -"2705 4171" -> "2706 4172" [label="[1, 1]", style=dashed]; -"2706 4172" -> "2707 4176" [label="[1]", style=dashed]; -"2706 4172" -> "3291 4173" [label="[1]", style=dashed]; -"2707 4176" -> "3176 6520" [label="[1]", style=solid]; -"2708 4102" -> "2709 4104" [label="[1]", style=dashed]; -"2709 4104" -> "2710 4105" [label="[1]", style=dashed]; -"2710 4105" -> "2711 4106" [label="[1]", style=solid]; -"2711 4106" -> "2712 4107" [label="[1, 1]", style=dashed]; -"2712 4107" -> "2713 4108" [label="[1, 1]", style=dashed]; -"2713 4108" -> "2714 4111" [label="[1]", style=dashed]; -"2713 4108" -> "2720 4119" [label="[1]", style=dashed]; -"2714 4111" -> "2716 4112" [label="[1]", style=dashed]; -"2715 4110" -> "2716 4112" [label="[1]", style=solid]; -"2716 4112" -> "2717 4121" [label="[1]", style=solid]; -"2716 4112" -> "2725 4132" [label="[1]", style=solid]; -"2717 4121" -> "2718 4122" [label="[1]", style=solid]; -"2718 4122" -> "2722 4125" [label="[1]", style=solid]; -"2719 4117" -> "2720 4119" [label="[1]", style=solid]; -"2720 4119" -> "2721 4120" [label="[1]", style=solid]; -"2720 4119" -> "3294 
4130" [label="[1]", style=solid]; -"2721 4120" -> "2722 4125" [label="[1]", style=solid]; -"2722 4125" -> "2723 4127" [label="[1, 3]", style=dashed]; -"2723 4127" -> "2724 4128" [label="[1, 1]", style=dashed]; -"2724 4128" -> "2725 4132" [label="[1]", style=dashed]; -"2724 4128" -> "3293 4129" [label="[1]", style=dashed]; -"2725 4132" -> "3176 6520" [label="[1]", style=solid]; -"2726 4058" -> "2727 4060" [label="[1]", style=dashed]; -"2727 4060" -> "2728 4061" [label="[1]", style=dashed]; -"2728 4061" -> "2729 4062" [label="[1]", style=solid]; -"2729 4062" -> "2730 4063" [label="[1, 1]", style=dashed]; -"2730 4063" -> "2731 4064" [label="[1, 1]", style=dashed]; -"2731 4064" -> "2732 4067" [label="[1]", style=dashed]; -"2731 4064" -> "2738 4075" [label="[1]", style=dashed]; -"2732 4067" -> "2734 4068" [label="[1]", style=dashed]; -"2733 4066" -> "2734 4068" [label="[1]", style=solid]; -"2734 4068" -> "2735 4077" [label="[1]", style=solid]; -"2734 4068" -> "2743 4088" [label="[1]", style=solid]; -"2735 4077" -> "2736 4078" [label="[1]", style=solid]; -"2736 4078" -> "2740 4081" [label="[1]", style=solid]; -"2737 4073" -> "2738 4075" [label="[1]", style=solid]; -"2738 4075" -> "2739 4076" [label="[1]", style=solid]; -"2738 4075" -> "3296 4086" [label="[1]", style=solid]; -"2739 4076" -> "2740 4081" [label="[1]", style=solid]; -"2740 4081" -> "2741 4083" [label="[1, 3]", style=dashed]; -"2741 4083" -> "2742 4084" [label="[1, 1]", style=dashed]; -"2742 4084" -> "2743 4088" [label="[1]", style=dashed]; -"2742 4084" -> "3295 4085" [label="[1]", style=dashed]; -"2743 4088" -> "3176 6520" [label="[1]", style=solid]; -"2744 4014" -> "2745 4016" [label="[1]", style=dashed]; -"2745 4016" -> "2746 4017" [label="[1]", style=dashed]; -"2746 4017" -> "2747 4018" [label="[1]", style=solid]; -"2747 4018" -> "2748 4019" [label="[1, 1]", style=dashed]; -"2748 4019" -> "2749 4020" [label="[1, 1]", style=dashed]; -"2749 4020" -> "2750 4023" [label="[1]", style=dashed]; -"2749 4020" -> "2756 4031" [label="[1]", style=dashed]; -"2750 4023" -> "2752 4024" [label="[1]", style=dashed]; -"2751 4022" -> "2752 4024" [label="[1]", style=solid]; -"2752 4024" -> "2753 4033" [label="[1]", style=solid]; -"2752 4024" -> "2761 4044" [label="[1]", style=solid]; -"2753 4033" -> "2754 4034" [label="[1]", style=solid]; -"2754 4034" -> "2758 4037" [label="[1]", style=solid]; -"2755 4029" -> "2756 4031" [label="[1]", style=solid]; -"2756 4031" -> "2757 4032" [label="[1]", style=solid]; -"2756 4031" -> "3298 4042" [label="[1]", style=solid]; -"2757 4032" -> "2758 4037" [label="[1]", style=solid]; -"2758 4037" -> "2759 4039" [label="[1, 3]", style=dashed]; -"2759 4039" -> "2760 4040" [label="[1, 1]", style=dashed]; -"2760 4040" -> "2761 4044" [label="[1]", style=dashed]; -"2760 4040" -> "3297 4041" [label="[1]", style=dashed]; -"2761 4044" -> "3176 6520" [label="[1]", style=solid]; -"2762 3970" -> "2763 3972" [label="[1]", style=dashed]; -"2763 3972" -> "2764 3973" [label="[1]", style=dashed]; -"2764 3973" -> "2765 3974" [label="[1]", style=solid]; -"2765 3974" -> "2766 3975" [label="[1, 1]", style=dashed]; -"2766 3975" -> "2767 3976" [label="[1, 1]", style=dashed]; -"2767 3976" -> "2768 3979" [label="[1]", style=dashed]; -"2767 3976" -> "2774 3987" [label="[1]", style=dashed]; -"2768 3979" -> "2770 3980" [label="[1]", style=dashed]; -"2769 3978" -> "2770 3980" [label="[1]", style=solid]; -"2770 3980" -> "2771 3989" [label="[1]", style=solid]; -"2770 3980" -> "2779 4000" [label="[1]", style=solid]; -"2771 3989" -> "2772 3990" 
[label="[1]", style=solid]; -"2772 3990" -> "2776 3993" [label="[1]", style=solid]; -"2773 3985" -> "2774 3987" [label="[1]", style=solid]; -"2774 3987" -> "2775 3988" [label="[1]", style=solid]; -"2774 3987" -> "3300 3998" [label="[1]", style=solid]; -"2775 3988" -> "2776 3993" [label="[1]", style=solid]; -"2776 3993" -> "2777 3995" [label="[1, 3]", style=dashed]; -"2777 3995" -> "2778 3996" [label="[1, 1]", style=dashed]; -"2778 3996" -> "2779 4000" [label="[1]", style=dashed]; -"2778 3996" -> "3299 3997" [label="[1]", style=dashed]; -"2779 4000" -> "3176 6520" [label="[1]", style=solid]; -"2780 3926" -> "2781 3928" [label="[1]", style=dashed]; -"2781 3928" -> "2782 3929" [label="[1]", style=dashed]; -"2782 3929" -> "2783 3930" [label="[1]", style=solid]; -"2783 3930" -> "2784 3931" [label="[1, 1]", style=dashed]; -"2784 3931" -> "2785 3932" [label="[1, 1]", style=dashed]; -"2785 3932" -> "2786 3935" [label="[1]", style=dashed]; -"2785 3932" -> "2792 3943" [label="[1]", style=dashed]; -"2786 3935" -> "2788 3936" [label="[1]", style=dashed]; -"2787 3934" -> "2788 3936" [label="[1]", style=solid]; -"2788 3936" -> "2789 3945" [label="[1]", style=solid]; -"2788 3936" -> "2797 3956" [label="[1]", style=solid]; -"2789 3945" -> "2790 3946" [label="[1]", style=solid]; -"2790 3946" -> "2794 3949" [label="[1]", style=solid]; -"2791 3941" -> "2792 3943" [label="[1]", style=solid]; -"2792 3943" -> "2793 3944" [label="[1]", style=solid]; -"2792 3943" -> "3302 3954" [label="[1]", style=solid]; -"2793 3944" -> "2794 3949" [label="[1]", style=solid]; -"2794 3949" -> "2795 3951" [label="[1, 3]", style=dashed]; -"2795 3951" -> "2796 3952" [label="[1, 1]", style=dashed]; -"2796 3952" -> "2797 3956" [label="[1]", style=dashed]; -"2796 3952" -> "3301 3953" [label="[1]", style=dashed]; -"2797 3956" -> "3176 6520" [label="[1]", style=solid]; -"2798 3882" -> "2799 3884" [label="[1]", style=dashed]; -"2799 3884" -> "2800 3885" [label="[1]", style=dashed]; -"2800 3885" -> "2801 3886" [label="[1]", style=solid]; -"2801 3886" -> "2802 3887" [label="[1, 1]", style=dashed]; -"2802 3887" -> "2803 3888" [label="[1, 1]", style=dashed]; -"2803 3888" -> "2804 3891" [label="[1]", style=dashed]; -"2803 3888" -> "2810 3899" [label="[1]", style=dashed]; -"2804 3891" -> "2806 3892" [label="[1]", style=dashed]; -"2805 3890" -> "2806 3892" [label="[1]", style=solid]; -"2806 3892" -> "2807 3901" [label="[1]", style=solid]; -"2806 3892" -> "2815 3912" [label="[1]", style=solid]; -"2807 3901" -> "2808 3902" [label="[1]", style=solid]; -"2808 3902" -> "2812 3905" [label="[1]", style=solid]; -"2809 3897" -> "2810 3899" [label="[1]", style=solid]; -"2810 3899" -> "2811 3900" [label="[1]", style=solid]; -"2810 3899" -> "3304 3910" [label="[1]", style=solid]; -"2811 3900" -> "2812 3905" [label="[1]", style=solid]; -"2812 3905" -> "2813 3907" [label="[1, 3]", style=dashed]; -"2813 3907" -> "2814 3908" [label="[1, 1]", style=dashed]; -"2814 3908" -> "2815 3912" [label="[1]", style=dashed]; -"2814 3908" -> "3303 3909" [label="[1]", style=dashed]; -"2815 3912" -> "3176 6520" [label="[1]", style=solid]; -"2816 3838" -> "2817 3840" [label="[1]", style=dashed]; -"2817 3840" -> "2818 3841" [label="[1]", style=dashed]; -"2818 3841" -> "2819 3842" [label="[1]", style=solid]; -"2819 3842" -> "2820 3843" [label="[1, 1]", style=dashed]; -"2820 3843" -> "2821 3844" [label="[1, 1]", style=dashed]; -"2821 3844" -> "2822 3847" [label="[1]", style=dashed]; -"2821 3844" -> "2828 3855" [label="[1]", style=dashed]; -"2822 3847" -> "2824 3848" [label="[1]", 
style=dashed]; -"2823 3846" -> "2824 3848" [label="[1]", style=solid]; -"2824 3848" -> "2825 3857" [label="[1]", style=solid]; -"2824 3848" -> "2833 3868" [label="[1]", style=solid]; -"2825 3857" -> "2826 3858" [label="[1]", style=solid]; -"2826 3858" -> "2830 3861" [label="[1]", style=solid]; -"2827 3853" -> "2828 3855" [label="[1]", style=solid]; -"2828 3855" -> "2829 3856" [label="[1]", style=solid]; -"2828 3855" -> "3306 3866" [label="[1]", style=solid]; -"2829 3856" -> "2830 3861" [label="[1]", style=solid]; -"2830 3861" -> "2831 3863" [label="[1, 3]", style=dashed]; -"2831 3863" -> "2832 3864" [label="[1, 1]", style=dashed]; -"2832 3864" -> "2833 3868" [label="[1]", style=dashed]; -"2832 3864" -> "3305 3865" [label="[1]", style=dashed]; -"2833 3868" -> "3176 6520" [label="[1]", style=solid]; -"2834 3794" -> "2835 3796" [label="[1]", style=dashed]; -"2835 3796" -> "2836 3797" [label="[1]", style=dashed]; -"2836 3797" -> "2837 3798" [label="[1]", style=solid]; -"2837 3798" -> "2838 3799" [label="[1, 1]", style=dashed]; -"2838 3799" -> "2839 3800" [label="[1, 1]", style=dashed]; -"2839 3800" -> "2840 3803" [label="[1]", style=dashed]; -"2839 3800" -> "2846 3811" [label="[1]", style=dashed]; -"2840 3803" -> "2842 3804" [label="[1]", style=dashed]; -"2841 3802" -> "2842 3804" [label="[1]", style=solid]; -"2842 3804" -> "2843 3813" [label="[1]", style=solid]; -"2842 3804" -> "2851 3824" [label="[1]", style=solid]; -"2843 3813" -> "2844 3814" [label="[1]", style=solid]; -"2844 3814" -> "2848 3817" [label="[1]", style=solid]; -"2845 3809" -> "2846 3811" [label="[1]", style=solid]; -"2846 3811" -> "2847 3812" [label="[1]", style=solid]; -"2846 3811" -> "3308 3822" [label="[1]", style=solid]; -"2847 3812" -> "2848 3817" [label="[1]", style=solid]; -"2848 3817" -> "2849 3819" [label="[1, 3]", style=dashed]; -"2849 3819" -> "2850 3820" [label="[1, 1]", style=dashed]; -"2850 3820" -> "2851 3824" [label="[1]", style=dashed]; -"2850 3820" -> "3307 3821" [label="[1]", style=dashed]; -"2851 3824" -> "3176 6520" [label="[1]", style=solid]; -"2852 3750" -> "2853 3752" [label="[1]", style=dashed]; -"2853 3752" -> "2854 3753" [label="[1]", style=dashed]; -"2854 3753" -> "2855 3754" [label="[1]", style=solid]; -"2855 3754" -> "2856 3755" [label="[1, 1]", style=dashed]; -"2856 3755" -> "2857 3756" [label="[1, 1]", style=dashed]; -"2857 3756" -> "2858 3759" [label="[1]", style=dashed]; -"2857 3756" -> "2864 3767" [label="[1]", style=dashed]; -"2858 3759" -> "2860 3760" [label="[1]", style=dashed]; -"2859 3758" -> "2860 3760" [label="[1]", style=solid]; -"2860 3760" -> "2861 3769" [label="[1]", style=solid]; -"2860 3760" -> "2869 3780" [label="[1]", style=solid]; -"2861 3769" -> "2862 3770" [label="[1]", style=solid]; -"2862 3770" -> "2866 3773" [label="[1]", style=solid]; -"2863 3765" -> "2864 3767" [label="[1]", style=solid]; -"2864 3767" -> "2865 3768" [label="[1]", style=solid]; -"2864 3767" -> "3310 3778" [label="[1]", style=solid]; -"2865 3768" -> "2866 3773" [label="[1]", style=solid]; -"2866 3773" -> "2867 3775" [label="[1, 3]", style=dashed]; -"2867 3775" -> "2868 3776" [label="[1, 1]", style=dashed]; -"2868 3776" -> "2869 3780" [label="[1]", style=dashed]; -"2868 3776" -> "3309 3777" [label="[1]", style=dashed]; -"2869 3780" -> "3176 6520" [label="[1]", style=solid]; -"2870 3706" -> "2871 3708" [label="[1]", style=dashed]; -"2871 3708" -> "2872 3709" [label="[1]", style=dashed]; -"2872 3709" -> "2873 3710" [label="[1]", style=solid]; -"2873 3710" -> "2874 3711" [label="[1, 1]", style=dashed]; 
-"2874 3711" -> "2875 3712" [label="[1, 1]", style=dashed]; -"2875 3712" -> "2876 3715" [label="[1]", style=dashed]; -"2875 3712" -> "2882 3723" [label="[1]", style=dashed]; -"2876 3715" -> "2878 3716" [label="[1]", style=dashed]; -"2877 3714" -> "2878 3716" [label="[1]", style=solid]; -"2878 3716" -> "2879 3725" [label="[1]", style=solid]; -"2878 3716" -> "2887 3736" [label="[1]", style=solid]; -"2879 3725" -> "2880 3726" [label="[1]", style=solid]; -"2880 3726" -> "2884 3729" [label="[1]", style=solid]; -"2881 3721" -> "2882 3723" [label="[1]", style=solid]; -"2882 3723" -> "2883 3724" [label="[1]", style=solid]; -"2882 3723" -> "3312 3734" [label="[1]", style=solid]; -"2883 3724" -> "2884 3729" [label="[1]", style=solid]; -"2884 3729" -> "2885 3731" [label="[1, 3]", style=dashed]; -"2885 3731" -> "2886 3732" [label="[1, 1]", style=dashed]; -"2886 3732" -> "2887 3736" [label="[1]", style=dashed]; -"2886 3732" -> "3311 3733" [label="[1]", style=dashed]; -"2887 3736" -> "3176 6520" [label="[1]", style=solid]; -"2888 3662" -> "2889 3664" [label="[1]", style=dashed]; -"2889 3664" -> "2890 3665" [label="[1]", style=dashed]; -"2890 3665" -> "2891 3666" [label="[1]", style=solid]; -"2891 3666" -> "2892 3667" [label="[1, 1]", style=dashed]; -"2892 3667" -> "2893 3668" [label="[1, 1]", style=dashed]; -"2893 3668" -> "2894 3671" [label="[1]", style=dashed]; -"2893 3668" -> "2900 3679" [label="[1]", style=dashed]; -"2894 3671" -> "2896 3672" [label="[1]", style=dashed]; -"2895 3670" -> "2896 3672" [label="[1]", style=solid]; -"2896 3672" -> "2897 3681" [label="[1]", style=solid]; -"2896 3672" -> "2905 3692" [label="[1]", style=solid]; -"2897 3681" -> "2898 3682" [label="[1]", style=solid]; -"2898 3682" -> "2902 3685" [label="[1]", style=solid]; -"2899 3677" -> "2900 3679" [label="[1]", style=solid]; -"2900 3679" -> "2901 3680" [label="[1]", style=solid]; -"2900 3679" -> "3314 3690" [label="[1]", style=solid]; -"2901 3680" -> "2902 3685" [label="[1]", style=solid]; -"2902 3685" -> "2903 3687" [label="[1, 3]", style=dashed]; -"2903 3687" -> "2904 3688" [label="[1, 1]", style=dashed]; -"2904 3688" -> "2905 3692" [label="[1]", style=dashed]; -"2904 3688" -> "3313 3689" [label="[1]", style=dashed]; -"2905 3692" -> "3176 6520" [label="[1]", style=solid]; -"2906 3618" -> "2907 3620" [label="[1]", style=dashed]; -"2907 3620" -> "2908 3621" [label="[1]", style=dashed]; -"2908 3621" -> "2909 3622" [label="[1]", style=solid]; -"2909 3622" -> "2910 3623" [label="[1, 1]", style=dashed]; -"2910 3623" -> "2911 3624" [label="[1, 1]", style=dashed]; -"2911 3624" -> "2912 3627" [label="[1]", style=dashed]; -"2911 3624" -> "2918 3635" [label="[1]", style=dashed]; -"2912 3627" -> "2914 3628" [label="[1]", style=dashed]; -"2913 3626" -> "2914 3628" [label="[1]", style=solid]; -"2914 3628" -> "2915 3637" [label="[1]", style=solid]; -"2914 3628" -> "2923 3648" [label="[1]", style=solid]; -"2915 3637" -> "2916 3638" [label="[1]", style=solid]; -"2916 3638" -> "2920 3641" [label="[1]", style=solid]; -"2917 3633" -> "2918 3635" [label="[1]", style=solid]; -"2918 3635" -> "2919 3636" [label="[1]", style=solid]; -"2918 3635" -> "3316 3646" [label="[1]", style=solid]; -"2919 3636" -> "2920 3641" [label="[1]", style=solid]; -"2920 3641" -> "2921 3643" [label="[1, 3]", style=dashed]; -"2921 3643" -> "2922 3644" [label="[1, 1]", style=dashed]; -"2922 3644" -> "2923 3648" [label="[1]", style=dashed]; -"2922 3644" -> "3315 3645" [label="[1]", style=dashed]; -"2923 3648" -> "3176 6520" [label="[1]", style=solid]; -"2924 3574" -> 
"2925 3576" [label="[1]", style=dashed]; -"2925 3576" -> "2926 3577" [label="[1]", style=dashed]; -"2926 3577" -> "2927 3578" [label="[1]", style=solid]; -"2927 3578" -> "2928 3579" [label="[1, 1]", style=dashed]; -"2928 3579" -> "2929 3580" [label="[1, 1]", style=dashed]; -"2929 3580" -> "2930 3583" [label="[1]", style=dashed]; -"2929 3580" -> "2936 3591" [label="[1]", style=dashed]; -"2930 3583" -> "2932 3584" [label="[1]", style=dashed]; -"2931 3582" -> "2932 3584" [label="[1]", style=solid]; -"2932 3584" -> "2933 3593" [label="[1]", style=solid]; -"2932 3584" -> "2941 3604" [label="[1]", style=solid]; -"2933 3593" -> "2934 3594" [label="[1]", style=solid]; -"2934 3594" -> "2938 3597" [label="[1]", style=solid]; -"2935 3589" -> "2936 3591" [label="[1]", style=solid]; -"2936 3591" -> "2937 3592" [label="[1]", style=solid]; -"2936 3591" -> "3318 3602" [label="[1]", style=solid]; -"2937 3592" -> "2938 3597" [label="[1]", style=solid]; -"2938 3597" -> "2939 3599" [label="[1, 3]", style=dashed]; -"2939 3599" -> "2940 3600" [label="[1, 1]", style=dashed]; -"2940 3600" -> "2941 3604" [label="[1]", style=dashed]; -"2940 3600" -> "3317 3601" [label="[1]", style=dashed]; -"2941 3604" -> "3176 6520" [label="[1]", style=solid]; -"2942 3530" -> "2943 3532" [label="[1]", style=dashed]; -"2943 3532" -> "2944 3533" [label="[1]", style=dashed]; -"2944 3533" -> "2945 3534" [label="[1]", style=solid]; -"2945 3534" -> "2946 3535" [label="[1, 1]", style=dashed]; -"2946 3535" -> "2947 3536" [label="[1, 1]", style=dashed]; -"2947 3536" -> "2948 3539" [label="[1]", style=dashed]; -"2947 3536" -> "2954 3547" [label="[1]", style=dashed]; -"2948 3539" -> "2950 3540" [label="[1]", style=dashed]; -"2949 3538" -> "2950 3540" [label="[1]", style=solid]; -"2950 3540" -> "2951 3549" [label="[1]", style=solid]; -"2950 3540" -> "2959 3560" [label="[1]", style=solid]; -"2951 3549" -> "2952 3550" [label="[1]", style=solid]; -"2952 3550" -> "2956 3553" [label="[1]", style=solid]; -"2953 3545" -> "2954 3547" [label="[1]", style=solid]; -"2954 3547" -> "2955 3548" [label="[1]", style=solid]; -"2954 3547" -> "3320 3558" [label="[1]", style=solid]; -"2955 3548" -> "2956 3553" [label="[1]", style=solid]; -"2956 3553" -> "2957 3555" [label="[1, 3]", style=dashed]; -"2957 3555" -> "2958 3556" [label="[1, 1]", style=dashed]; -"2958 3556" -> "2959 3560" [label="[1]", style=dashed]; -"2958 3556" -> "3319 3557" [label="[1]", style=dashed]; -"2959 3560" -> "3176 6520" [label="[1]", style=solid]; -"2960 3486" -> "2961 3488" [label="[1]", style=dashed]; -"2961 3488" -> "2962 3489" [label="[1]", style=dashed]; -"2962 3489" -> "2963 3490" [label="[1]", style=solid]; -"2963 3490" -> "2964 3491" [label="[1, 1]", style=dashed]; -"2964 3491" -> "2965 3492" [label="[1, 1]", style=dashed]; -"2965 3492" -> "2966 3495" [label="[1]", style=dashed]; -"2965 3492" -> "2972 3503" [label="[1]", style=dashed]; -"2966 3495" -> "2968 3496" [label="[1]", style=dashed]; -"2967 3494" -> "2968 3496" [label="[1]", style=solid]; -"2968 3496" -> "2969 3505" [label="[1]", style=solid]; -"2968 3496" -> "2977 3516" [label="[1]", style=solid]; -"2969 3505" -> "2970 3506" [label="[1]", style=solid]; -"2970 3506" -> "2974 3509" [label="[1]", style=solid]; -"2971 3501" -> "2972 3503" [label="[1]", style=solid]; -"2972 3503" -> "2973 3504" [label="[1]", style=solid]; -"2972 3503" -> "3322 3514" [label="[1]", style=solid]; -"2973 3504" -> "2974 3509" [label="[1]", style=solid]; -"2974 3509" -> "2975 3511" [label="[1, 3]", style=dashed]; -"2975 3511" -> "2976 3512" 
[label="[1, 1]", style=dashed]; -"2976 3512" -> "2977 3516" [label="[1]", style=dashed]; -"2976 3512" -> "3321 3513" [label="[1]", style=dashed]; -"2977 3516" -> "3176 6520" [label="[1]", style=solid]; -"2978 3442" -> "2979 3444" [label="[1]", style=dashed]; -"2979 3444" -> "2980 3445" [label="[1]", style=dashed]; -"2980 3445" -> "2981 3446" [label="[1]", style=solid]; -"2981 3446" -> "2982 3447" [label="[1, 1]", style=dashed]; -"2982 3447" -> "2983 3448" [label="[1, 1]", style=dashed]; -"2983 3448" -> "2984 3451" [label="[1]", style=dashed]; -"2983 3448" -> "2990 3459" [label="[1]", style=dashed]; -"2984 3451" -> "2986 3452" [label="[1]", style=dashed]; -"2985 3450" -> "2986 3452" [label="[1]", style=solid]; -"2986 3452" -> "2987 3461" [label="[1]", style=solid]; -"2986 3452" -> "2995 3472" [label="[1]", style=solid]; -"2987 3461" -> "2988 3462" [label="[1]", style=solid]; -"2988 3462" -> "2992 3465" [label="[1]", style=solid]; -"2989 3457" -> "2990 3459" [label="[1]", style=solid]; -"2990 3459" -> "2991 3460" [label="[1]", style=solid]; -"2990 3459" -> "3324 3470" [label="[1]", style=solid]; -"2991 3460" -> "2992 3465" [label="[1]", style=solid]; -"2992 3465" -> "2993 3467" [label="[1, 3]", style=dashed]; -"2993 3467" -> "2994 3468" [label="[1, 1]", style=dashed]; -"2994 3468" -> "2995 3472" [label="[1]", style=dashed]; -"2994 3468" -> "3323 3469" [label="[1]", style=dashed]; -"2995 3472" -> "3176 6520" [label="[1]", style=solid]; -"2996 3398" -> "2997 3400" [label="[1]", style=dashed]; -"2997 3400" -> "2998 3401" [label="[1]", style=dashed]; -"2998 3401" -> "2999 3402" [label="[1]", style=solid]; -"2999 3402" -> "3000 3403" [label="[1, 1]", style=dashed]; -"3000 3403" -> "3001 3404" [label="[1, 1]", style=dashed]; -"3001 3404" -> "3002 3407" [label="[1]", style=dashed]; -"3001 3404" -> "3008 3415" [label="[1]", style=dashed]; -"3002 3407" -> "3004 3408" [label="[1]", style=dashed]; -"3003 3406" -> "3004 3408" [label="[1]", style=solid]; -"3004 3408" -> "3005 3417" [label="[1]", style=solid]; -"3004 3408" -> "3013 3428" [label="[1]", style=solid]; -"3005 3417" -> "3006 3418" [label="[1]", style=solid]; -"3006 3418" -> "3010 3421" [label="[1]", style=solid]; -"3007 3413" -> "3008 3415" [label="[1]", style=solid]; -"3008 3415" -> "3009 3416" [label="[1]", style=solid]; -"3008 3415" -> "3326 3426" [label="[1]", style=solid]; -"3009 3416" -> "3010 3421" [label="[1]", style=solid]; -"3010 3421" -> "3011 3423" [label="[1, 3]", style=dashed]; -"3011 3423" -> "3012 3424" [label="[1, 1]", style=dashed]; -"3012 3424" -> "3013 3428" [label="[1]", style=dashed]; -"3012 3424" -> "3325 3425" [label="[1]", style=dashed]; -"3013 3428" -> "3176 6520" [label="[1]", style=solid]; -"3014 3354" -> "3015 3356" [label="[1]", style=dashed]; -"3015 3356" -> "3016 3357" [label="[1]", style=dashed]; -"3016 3357" -> "3017 3358" [label="[1]", style=solid]; -"3017 3358" -> "3018 3359" [label="[1, 1]", style=dashed]; -"3018 3359" -> "3019 3360" [label="[1, 1]", style=dashed]; -"3019 3360" -> "3020 3363" [label="[1]", style=dashed]; -"3019 3360" -> "3026 3371" [label="[1]", style=dashed]; -"3020 3363" -> "3022 3364" [label="[1]", style=dashed]; -"3021 3362" -> "3022 3364" [label="[1]", style=solid]; -"3022 3364" -> "3023 3373" [label="[1]", style=solid]; -"3022 3364" -> "3031 3384" [label="[1]", style=solid]; -"3023 3373" -> "3024 3374" [label="[1]", style=solid]; -"3024 3374" -> "3028 3377" [label="[1]", style=solid]; -"3025 3369" -> "3026 3371" [label="[1]", style=solid]; -"3026 3371" -> "3027 3372" [label="[1]", 
style=solid]; -"3026 3371" -> "3328 3382" [label="[1]", style=solid]; -"3027 3372" -> "3028 3377" [label="[1]", style=solid]; -"3028 3377" -> "3029 3379" [label="[1, 3]", style=dashed]; -"3029 3379" -> "3030 3380" [label="[1, 1]", style=dashed]; -"3030 3380" -> "3031 3384" [label="[1]", style=dashed]; -"3030 3380" -> "3327 3381" [label="[1]", style=dashed]; -"3031 3384" -> "3176 6520" [label="[1]", style=solid]; -"3032 3310" -> "3033 3312" [label="[1]", style=dashed]; -"3033 3312" -> "3034 3313" [label="[1]", style=dashed]; -"3034 3313" -> "3035 3314" [label="[1]", style=solid]; -"3035 3314" -> "3036 3315" [label="[1, 1]", style=dashed]; -"3036 3315" -> "3037 3316" [label="[1, 1]", style=dashed]; -"3037 3316" -> "3038 3319" [label="[1]", style=dashed]; -"3037 3316" -> "3044 3327" [label="[1]", style=dashed]; -"3038 3319" -> "3040 3320" [label="[1]", style=dashed]; -"3039 3318" -> "3040 3320" [label="[1]", style=solid]; -"3040 3320" -> "3041 3329" [label="[1]", style=solid]; -"3040 3320" -> "3049 3340" [label="[1]", style=solid]; -"3041 3329" -> "3042 3330" [label="[1]", style=solid]; -"3042 3330" -> "3046 3333" [label="[1]", style=solid]; -"3043 3325" -> "3044 3327" [label="[1]", style=solid]; -"3044 3327" -> "3045 3328" [label="[1]", style=solid]; -"3044 3327" -> "3330 3338" [label="[1]", style=solid]; -"3045 3328" -> "3046 3333" [label="[1]", style=solid]; -"3046 3333" -> "3047 3335" [label="[1, 3]", style=dashed]; -"3047 3335" -> "3048 3336" [label="[1, 1]", style=dashed]; -"3048 3336" -> "3049 3340" [label="[1]", style=dashed]; -"3048 3336" -> "3329 3337" [label="[1]", style=dashed]; -"3049 3340" -> "3176 6520" [label="[1]", style=solid]; -"3050 3266" -> "3051 3268" [label="[1]", style=dashed]; -"3051 3268" -> "3052 3269" [label="[1]", style=dashed]; -"3052 3269" -> "3053 3270" [label="[1]", style=solid]; -"3053 3270" -> "3054 3271" [label="[1, 1]", style=dashed]; -"3054 3271" -> "3055 3272" [label="[1, 1]", style=dashed]; -"3055 3272" -> "3056 3275" [label="[1]", style=dashed]; -"3055 3272" -> "3062 3283" [label="[1]", style=dashed]; -"3056 3275" -> "3058 3276" [label="[1]", style=dashed]; -"3057 3274" -> "3058 3276" [label="[1]", style=solid]; -"3058 3276" -> "3059 3285" [label="[1]", style=solid]; -"3058 3276" -> "3067 3296" [label="[1]", style=solid]; -"3059 3285" -> "3060 3286" [label="[1]", style=solid]; -"3060 3286" -> "3064 3289" [label="[1]", style=solid]; -"3061 3281" -> "3062 3283" [label="[1]", style=solid]; -"3062 3283" -> "3063 3284" [label="[1]", style=solid]; -"3062 3283" -> "3332 3294" [label="[1]", style=solid]; -"3063 3284" -> "3064 3289" [label="[1]", style=solid]; -"3064 3289" -> "3065 3291" [label="[1, 3]", style=dashed]; -"3065 3291" -> "3066 3292" [label="[1, 1]", style=dashed]; -"3066 3292" -> "3067 3296" [label="[1]", style=dashed]; -"3066 3292" -> "3331 3293" [label="[1]", style=dashed]; -"3067 3296" -> "3176 6520" [label="[1]", style=solid]; -"3068 3222" -> "3069 3224" [label="[1]", style=dashed]; -"3069 3224" -> "3070 3225" [label="[1]", style=dashed]; -"3070 3225" -> "3071 3226" [label="[1]", style=solid]; -"3071 3226" -> "3072 3227" [label="[1, 1]", style=dashed]; -"3072 3227" -> "3073 3228" [label="[1, 1]", style=dashed]; -"3073 3228" -> "3074 3231" [label="[1]", style=dashed]; -"3073 3228" -> "3080 3239" [label="[1]", style=dashed]; -"3074 3231" -> "3076 3232" [label="[1]", style=dashed]; -"3075 3230" -> "3076 3232" [label="[1]", style=solid]; -"3076 3232" -> "3077 3241" [label="[1]", style=solid]; -"3076 3232" -> "3085 3252" [label="[1]", style=solid]; 
-"3077 3241" -> "3078 3242" [label="[1]", style=solid]; -"3078 3242" -> "3082 3245" [label="[1]", style=solid]; -"3079 3237" -> "3080 3239" [label="[1]", style=solid]; -"3080 3239" -> "3081 3240" [label="[1]", style=solid]; -"3080 3239" -> "3334 3250" [label="[1]", style=solid]; -"3081 3240" -> "3082 3245" [label="[1]", style=solid]; -"3082 3245" -> "3083 3247" [label="[1, 3]", style=dashed]; -"3083 3247" -> "3084 3248" [label="[1, 1]", style=dashed]; -"3084 3248" -> "3085 3252" [label="[1]", style=dashed]; -"3084 3248" -> "3333 3249" [label="[1]", style=dashed]; -"3085 3252" -> "3176 6520" [label="[1]", style=solid]; -"3086 3178" -> "3087 3180" [label="[1]", style=dashed]; -"3087 3180" -> "3088 3181" [label="[1]", style=dashed]; -"3088 3181" -> "3089 3182" [label="[1]", style=solid]; -"3089 3182" -> "3090 3183" [label="[1, 1]", style=dashed]; -"3090 3183" -> "3091 3184" [label="[1, 1]", style=dashed]; -"3091 3184" -> "3092 3187" [label="[1]", style=dashed]; -"3091 3184" -> "3098 3195" [label="[1]", style=dashed]; -"3092 3187" -> "3094 3188" [label="[1]", style=dashed]; -"3093 3186" -> "3094 3188" [label="[1]", style=solid]; -"3094 3188" -> "3095 3197" [label="[1]", style=solid]; -"3094 3188" -> "3103 3208" [label="[1]", style=solid]; -"3095 3197" -> "3096 3198" [label="[1]", style=solid]; -"3096 3198" -> "3100 3201" [label="[1]", style=solid]; -"3097 3193" -> "3098 3195" [label="[1]", style=solid]; -"3098 3195" -> "3099 3196" [label="[1]", style=solid]; -"3098 3195" -> "3336 3206" [label="[1]", style=solid]; -"3099 3196" -> "3100 3201" [label="[1]", style=solid]; -"3100 3201" -> "3101 3203" [label="[1, 3]", style=dashed]; -"3101 3203" -> "3102 3204" [label="[1, 1]", style=dashed]; -"3102 3204" -> "3103 3208" [label="[1]", style=dashed]; -"3102 3204" -> "3335 3205" [label="[1]", style=dashed]; -"3103 3208" -> "3176 6520" [label="[1]", style=solid]; -"3104 3134" -> "3105 3136" [label="[1]", style=dashed]; -"3105 3136" -> "3106 3137" [label="[1]", style=dashed]; -"3106 3137" -> "3107 3138" [label="[1]", style=solid]; -"3107 3138" -> "3108 3139" [label="[1, 1]", style=dashed]; -"3108 3139" -> "3109 3140" [label="[1, 1]", style=dashed]; -"3109 3140" -> "3110 3143" [label="[1]", style=dashed]; -"3109 3140" -> "3116 3151" [label="[1]", style=dashed]; -"3110 3143" -> "3112 3144" [label="[1]", style=dashed]; -"3111 3142" -> "3112 3144" [label="[1]", style=solid]; -"3112 3144" -> "3113 3153" [label="[1]", style=solid]; -"3112 3144" -> "3121 3164" [label="[1]", style=solid]; -"3113 3153" -> "3114 3154" [label="[1]", style=solid]; -"3114 3154" -> "3118 3157" [label="[1]", style=solid]; -"3115 3149" -> "3116 3151" [label="[1]", style=solid]; -"3116 3151" -> "3117 3152" [label="[1]", style=solid]; -"3116 3151" -> "3338 3162" [label="[1]", style=solid]; -"3117 3152" -> "3118 3157" [label="[1]", style=solid]; -"3118 3157" -> "3119 3159" [label="[1, 3]", style=dashed]; -"3119 3159" -> "3120 3160" [label="[1, 1]", style=dashed]; -"3120 3160" -> "3121 3164" [label="[1]", style=dashed]; -"3120 3160" -> "3337 3161" [label="[1]", style=dashed]; -"3121 3164" -> "3176 6520" [label="[1]", style=solid]; -"3122 3090" -> "3123 3092" [label="[1]", style=dashed]; -"3123 3092" -> "3124 3093" [label="[1]", style=dashed]; -"3124 3093" -> "3125 3094" [label="[1]", style=solid]; -"3125 3094" -> "3126 3095" [label="[1, 1]", style=dashed]; -"3126 3095" -> "3127 3096" [label="[1, 1]", style=dashed]; -"3127 3096" -> "3128 3099" [label="[1]", style=dashed]; -"3127 3096" -> "3134 3107" [label="[1]", style=dashed]; -"3128 3099" 
-> "3130 3100" [label="[1]", style=dashed]; -"3129 3098" -> "3130 3100" [label="[1]", style=solid]; -"3130 3100" -> "3131 3109" [label="[1]", style=solid]; -"3130 3100" -> "3139 3120" [label="[1]", style=solid]; -"3131 3109" -> "3132 3110" [label="[1]", style=solid]; -"3132 3110" -> "3136 3113" [label="[1]", style=solid]; -"3133 3105" -> "3134 3107" [label="[1]", style=solid]; -"3134 3107" -> "3135 3108" [label="[1]", style=solid]; -"3134 3107" -> "3340 3118" [label="[1]", style=solid]; -"3135 3108" -> "3136 3113" [label="[1]", style=solid]; -"3136 3113" -> "3137 3115" [label="[1, 3]", style=dashed]; -"3137 3115" -> "3138 3116" [label="[1, 1]", style=dashed]; -"3138 3116" -> "3139 3120" [label="[1]", style=dashed]; -"3138 3116" -> "3339 3117" [label="[1]", style=dashed]; -"3139 3120" -> "3176 6520" [label="[1]", style=solid]; -"3140 3046" -> "3141 3048" [label="[1]", style=dashed]; -"3141 3048" -> "3142 3049" [label="[1]", style=dashed]; -"3142 3049" -> "3143 3050" [label="[1]", style=solid]; -"3143 3050" -> "3144 3051" [label="[1, 1]", style=dashed]; -"3144 3051" -> "3145 3052" [label="[1, 1]", style=dashed]; -"3145 3052" -> "3146 3055" [label="[1]", style=dashed]; -"3145 3052" -> "3152 3063" [label="[1]", style=dashed]; -"3146 3055" -> "3148 3056" [label="[1]", style=dashed]; -"3147 3054" -> "3148 3056" [label="[1]", style=solid]; -"3148 3056" -> "3149 3065" [label="[1]", style=solid]; -"3148 3056" -> "3157 3076" [label="[1]", style=solid]; -"3149 3065" -> "3150 3066" [label="[1]", style=solid]; -"3150 3066" -> "3154 3069" [label="[1]", style=solid]; -"3151 3061" -> "3152 3063" [label="[1]", style=solid]; -"3152 3063" -> "3153 3064" [label="[1]", style=solid]; -"3152 3063" -> "3342 3074" [label="[1]", style=solid]; -"3153 3064" -> "3154 3069" [label="[1]", style=solid]; -"3154 3069" -> "3155 3071" [label="[1, 3]", style=dashed]; -"3155 3071" -> "3156 3072" [label="[1, 1]", style=dashed]; -"3156 3072" -> "3157 3076" [label="[1]", style=dashed]; -"3156 3072" -> "3341 3073" [label="[1]", style=dashed]; -"3157 3076" -> "3176 6520" [label="[1]", style=solid]; -"3158 3002" -> "3159 3004" [label="[1]", style=dashed]; -"3159 3004" -> "3160 3005" [label="[1]", style=dashed]; -"3160 3005" -> "3161 3006" [label="[1]", style=solid]; -"3161 3006" -> "3162 3007" [label="[1, 1]", style=dashed]; -"3162 3007" -> "3163 3008" [label="[1, 1]", style=dashed]; -"3163 3008" -> "3164 3011" [label="[1]", style=dashed]; -"3163 3008" -> "3170 3019" [label="[1]", style=dashed]; -"3164 3011" -> "3166 3012" [label="[1]", style=dashed]; -"3165 3010" -> "3166 3012" [label="[1]", style=solid]; -"3166 3012" -> "3167 3021" [label="[1]", style=solid]; -"3166 3012" -> "3175 3032" [label="[1]", style=solid]; -"3167 3021" -> "3168 3022" [label="[1]", style=solid]; -"3168 3022" -> "3172 3025" [label="[1]", style=solid]; -"3169 3017" -> "3170 3019" [label="[1]", style=solid]; -"3170 3019" -> "3171 3020" [label="[1]", style=solid]; -"3170 3019" -> "3344 3030" [label="[1]", style=solid]; -"3171 3020" -> "3172 3025" [label="[1]", style=solid]; -"3172 3025" -> "3173 3027" [label="[1, 3]", style=dashed]; -"3173 3027" -> "3174 3028" [label="[1, 1]", style=dashed]; -"3174 3028" -> "3175 3032" [label="[1]", style=dashed]; -"3174 3028" -> "3343 3029" [label="[1]", style=dashed]; -"3175 3032" -> "3176 6520" [label="[1]", style=solid]; -"3176 6520" -> "3177 6521" [label="[1]", style=solid]; -"3176 6520" -> "3183 6528" [label="[1]", style=solid]; -"3176 6520" -> "3885 6534" [label="[1]", style=solid]; -"3177 6521" -> "3178 6523" 
[label="[1]", style=dashed]; -"3178 6523" -> "3179 6524" [label="[1]", style=dashed]; -"3179 6524" -> "3180 6525" [label="[1]", style=dashed]; +"1659 2773_MatMul" -> "1660 2773_Add" [label="[]", style=solid]; +"1660 2773_Add" -> "1661 2776" [label="[]", style=solid]; +"1661 2776" -> "1662 2947" [label="[]", style=solid]; +"1661 2776" -> "1673 2872" [label="[]", style=solid]; +"1661 2776" -> "1688 2848" [label="[]", style=solid]; +"1661 2776" -> "1704 2860" [label="[]", style=solid]; +"1661 2776" -> "1719 2836" [label="[]", style=solid]; +"1662 2947" -> "1734 2948" [label="[-1]", style=dashed]; +"1663 2775" -> "1664 2777" [label="[]", style=solid]; +"1664 2777" -> "1665 2806" [label="[]", style=solid]; +"1664 2777" -> "1667 2799" [label="[]", style=solid]; +"1664 2777" -> "1681 2826" [label="[]", style=solid]; +"1664 2777" -> "1696 2789" [label="[]", style=solid]; +"1664 2777" -> "1698 2782" [label="[]", style=solid]; +"1664 2777" -> "1712 2816" [label="[]", style=solid]; +"1665 2806" -> "1666 2808" [label="[]", style=solid]; +"1666 2808" -> "1669 2809" [label="[]", style=solid]; +"1667 2799" -> "1668 2801" [label="[]", style=solid]; +"1668 2801" -> "1669 2809" [label="[]", style=solid]; +"1669 2809" -> "1670 2811" [label="[]", style=solid]; +"1670 2811" -> "1671 2923" [label="[]", style=solid]; +"1670 2811" -> "1680 2830" [label="[]", style=solid]; +"1670 2811" -> "1686 2900" [label="[]", style=solid]; +"1671 2923" -> "1672 2924" [label="[]", style=solid]; +"1672 2924" -> "1678 2925" [label="[]", style=solid]; +"1673 2872" -> "1674 2877" [label="[]", style=solid]; +"1674 2877" -> "1675 2879" [label="[]", style=solid]; +"1675 2879" -> "1676 2881" [label="[]", style=solid]; +"1676 2881" -> "1677 2918" [label="[]", style=solid]; +"1677 2918" -> "1678 2925" [label="[]", style=solid]; +"1678 2925" -> "1679 2938" [label="[]", style=solid]; +"1678 2925" -> "1727 2930" [label="[]", style=solid]; +"1679 2938" -> "1693 2939" [label="[]", style=solid]; +"1680 2830" -> "1683 2831" [label="[]", style=solid]; +"1681 2826" -> "1682 2828" [label="[]", style=solid]; +"1682 2828" -> "1683 2831" [label="[]", style=solid]; +"1683 2831" -> "1684 2907" [label="[]", style=solid]; +"1684 2907" -> "1685 2908" [label="[]", style=solid]; +"1685 2908" -> "1692 2909" [label="[]", style=solid]; +"1686 2900" -> "1687 2901" [label="[]", style=solid]; +"1687 2901" -> "1691 2902" [label="[]", style=solid]; +"1688 2848" -> "1689 2853" [label="[]", style=solid]; +"1689 2853" -> "1690 2855" [label="[]", style=solid]; +"1690 2855" -> "1691 2902" [label="[]", style=solid]; +"1691 2902" -> "1692 2909" [label="[]", style=solid]; +"1692 2909" -> "1693 2939" [label="[]", style=solid]; +"1692 2909" -> "1728 2931" [label="[]", style=solid]; +"1693 2939" -> "1694 2941" [label="[]", style=solid]; +"1694 2941" -> "1695 2945" [label="[]", style=solid]; +"1695 2945" -> "1733 2946" [label="[]", style=solid]; +"1696 2789" -> "1697 2791" [label="[]", style=solid]; +"1697 2791" -> "1700 2792" [label="[]", style=solid]; +"1698 2782" -> "1699 2784" [label="[]", style=solid]; +"1699 2784" -> "1700 2792" [label="[]", style=solid]; +"1700 2792" -> "1701 2794" [label="[]", style=solid]; +"1701 2794" -> "1702 2915" [label="[]", style=solid]; +"1701 2794" -> "1711 2820" [label="[]", style=solid]; +"1701 2794" -> "1717 2886" [label="[]", style=solid]; +"1702 2915" -> "1703 2916" [label="[]", style=solid]; +"1703 2916" -> "1709 2917" [label="[]", style=solid]; +"1704 2860" -> "1705 2865" [label="[]", style=solid]; +"1705 2865" -> "1706 2867" 
[label="[]", style=solid]; +"1706 2867" -> "1707 2880" [label="[]", style=solid]; +"1707 2880" -> "1708 2910" [label="[]", style=solid]; +"1708 2910" -> "1709 2917" [label="[]", style=solid]; +"1709 2917" -> "1710 2933" [label="[]", style=solid]; +"1709 2917" -> "1730 2927" [label="[]", style=solid]; +"1710 2933" -> "1724 2934" [label="[]", style=solid]; +"1711 2820" -> "1714 2821" [label="[]", style=solid]; +"1712 2816" -> "1713 2818" [label="[]", style=solid]; +"1713 2818" -> "1714 2821" [label="[]", style=solid]; +"1714 2821" -> "1715 2893" [label="[]", style=solid]; +"1715 2893" -> "1716 2894" [label="[]", style=solid]; +"1716 2894" -> "1723 2895" [label="[]", style=solid]; +"1717 2886" -> "1718 2887" [label="[]", style=solid]; +"1718 2887" -> "1722 2888" [label="[]", style=solid]; +"1719 2836" -> "1720 2841" [label="[]", style=solid]; +"1720 2841" -> "1721 2843" [label="[]", style=solid]; +"1721 2843" -> "1722 2888" [label="[]", style=solid]; +"1722 2888" -> "1723 2895" [label="[]", style=solid]; +"1723 2895" -> "1724 2934" [label="[]", style=solid]; +"1723 2895" -> "1731 2928" [label="[]", style=solid]; +"1724 2934" -> "1725 2936" [label="[]", style=solid]; +"1725 2936" -> "1726 2944" [label="[]", style=solid]; +"1726 2944" -> "1733 2946" [label="[]", style=solid]; +"1727 2930" -> "1728 2931" [label="[]", style=solid]; +"1728 2931" -> "1729 2943" [label="[]", style=solid]; +"1729 2943" -> "1733 2946" [label="[]", style=solid]; +"1730 2927" -> "1731 2928" [label="[]", style=solid]; +"1731 2928" -> "1732 2942" [label="[]", style=solid]; +"1732 2942" -> "1733 2946" [label="[]", style=solid]; +"1733 2946" -> "1734 2948" [label="[]", style=solid]; +"1734 2948" -> "1735 2953" [label="[]", style=solid]; +"1735 2953" -> "1736 2971" [label="[-1, 4]", style=solid]; +"1735 2953" -> "1740 2960" [label="[-1, 4]", style=solid]; +"1736 2971" -> "1737 2976" [label="[-1, 4]", style=solid]; +"1737 2976" -> "1738 2977" [label="[-1, 2]", style=solid]; +"1738 2977" -> "1739 2979" [label="[-1, 2]", style=solid]; +"1739 2979" -> "1744 2980" [label="[-1, 2, 1]", style=solid]; +"1740 2960" -> "1741 2965" [label="[-1, 4]", style=solid]; +"1741 2965" -> "1742 2966" [label="[-1, 2]", style=solid]; +"1742 2966" -> "1743 2978" [label="[-1, 2]", style=solid]; +"1743 2978" -> "1744 2980" [label="[-1, 2, 1]", style=solid]; +"1744 2980" -> "1745 2982" [label="[-1, 2, 2]", style=solid]; +"1745 2982" -> "1746 2989" [label="[-1, 4]", style=solid]; +"1746 2989" -> "1747 6493" [label="[]", style=solid]; +"1746 2989" -> "1765 6449" [label="[]", style=solid]; +"1746 2989" -> "1783 6405" [label="[]", style=solid]; +"1746 2989" -> "1801 6361" [label="[]", style=solid]; +"1746 2989" -> "1819 6317" [label="[]", style=solid]; +"1746 2989" -> "1837 6273" [label="[]", style=solid]; +"1746 2989" -> "1855 6229" [label="[]", style=solid]; +"1746 2989" -> "1873 6185" [label="[]", style=solid]; +"1746 2989" -> "1891 6141" [label="[]", style=solid]; +"1746 2989" -> "1909 6097" [label="[]", style=solid]; +"1746 2989" -> "1927 6053" [label="[]", style=solid]; +"1746 2989" -> "1945 6009" [label="[]", style=solid]; +"1746 2989" -> "1963 5965" [label="[]", style=solid]; +"1746 2989" -> "1981 5921" [label="[]", style=solid]; +"1746 2989" -> "1999 5877" [label="[]", style=solid]; +"1746 2989" -> "2017 5833" [label="[]", style=solid]; +"1746 2989" -> "2035 5789" [label="[]", style=solid]; +"1746 2989" -> "2053 5745" [label="[]", style=solid]; +"1746 2989" -> "2071 5701" [label="[]", style=solid]; +"1746 2989" -> "2089 5657" [label="[]", 
style=solid]; +"1746 2989" -> "2107 5613" [label="[]", style=solid]; +"1746 2989" -> "2125 5569" [label="[]", style=solid]; +"1746 2989" -> "2143 5525" [label="[]", style=solid]; +"1746 2989" -> "2161 5481" [label="[]", style=solid]; +"1746 2989" -> "2179 5437" [label="[]", style=solid]; +"1746 2989" -> "2197 5393" [label="[]", style=solid]; +"1746 2989" -> "2215 5349" [label="[]", style=solid]; +"1746 2989" -> "2233 5305" [label="[]", style=solid]; +"1746 2989" -> "2251 5261" [label="[]", style=solid]; +"1746 2989" -> "2269 5217" [label="[]", style=solid]; +"1746 2989" -> "2287 5173" [label="[]", style=solid]; +"1746 2989" -> "2305 5129" [label="[]", style=solid]; +"1746 2989" -> "2323 5085" [label="[]", style=solid]; +"1746 2989" -> "2341 5041" [label="[]", style=solid]; +"1746 2989" -> "2359 4997" [label="[]", style=solid]; +"1746 2989" -> "2377 4953" [label="[]", style=solid]; +"1746 2989" -> "2395 4909" [label="[]", style=solid]; +"1746 2989" -> "2413 4865" [label="[]", style=solid]; +"1746 2989" -> "2431 4821" [label="[]", style=solid]; +"1746 2989" -> "2449 4777" [label="[]", style=solid]; +"1746 2989" -> "2467 4733" [label="[]", style=solid]; +"1746 2989" -> "2485 4689" [label="[]", style=solid]; +"1746 2989" -> "2503 4645" [label="[]", style=solid]; +"1746 2989" -> "2521 4601" [label="[]", style=solid]; +"1746 2989" -> "2539 4557" [label="[]", style=solid]; +"1746 2989" -> "2557 4513" [label="[]", style=solid]; +"1746 2989" -> "2575 4469" [label="[]", style=solid]; +"1746 2989" -> "2593 4425" [label="[]", style=solid]; +"1746 2989" -> "2611 4381" [label="[]", style=solid]; +"1746 2989" -> "2629 4337" [label="[]", style=solid]; +"1746 2989" -> "2647 4293" [label="[]", style=solid]; +"1746 2989" -> "2665 4249" [label="[]", style=solid]; +"1746 2989" -> "2683 4205" [label="[]", style=solid]; +"1746 2989" -> "2701 4161" [label="[]", style=solid]; +"1746 2989" -> "2719 4117" [label="[]", style=solid]; +"1746 2989" -> "2737 4073" [label="[]", style=solid]; +"1746 2989" -> "2755 4029" [label="[]", style=solid]; +"1746 2989" -> "2773 3985" [label="[]", style=solid]; +"1746 2989" -> "2791 3941" [label="[]", style=solid]; +"1746 2989" -> "2809 3897" [label="[]", style=solid]; +"1746 2989" -> "2827 3853" [label="[]", style=solid]; +"1746 2989" -> "2845 3809" [label="[]", style=solid]; +"1746 2989" -> "2863 3765" [label="[]", style=solid]; +"1746 2989" -> "2881 3721" [label="[]", style=solid]; +"1746 2989" -> "2899 3677" [label="[]", style=solid]; +"1746 2989" -> "2917 3633" [label="[]", style=solid]; +"1746 2989" -> "2935 3589" [label="[]", style=solid]; +"1746 2989" -> "2953 3545" [label="[]", style=solid]; +"1746 2989" -> "2971 3501" [label="[]", style=solid]; +"1746 2989" -> "2989 3457" [label="[]", style=solid]; +"1746 2989" -> "3007 3413" [label="[]", style=solid]; +"1746 2989" -> "3025 3369" [label="[]", style=solid]; +"1746 2989" -> "3043 3325" [label="[]", style=solid]; +"1746 2989" -> "3061 3281" [label="[]", style=solid]; +"1746 2989" -> "3079 3237" [label="[]", style=solid]; +"1746 2989" -> "3097 3193" [label="[]", style=solid]; +"1746 2989" -> "3115 3149" [label="[]", style=solid]; +"1746 2989" -> "3133 3105" [label="[]", style=solid]; +"1746 2989" -> "3151 3061" [label="[]", style=solid]; +"1746 2989" -> "3169 3017" [label="[]", style=solid]; +"1747 6493" -> "1748 6495" [label="[]", style=solid]; +"1748 6495" -> "1749 6496" [label="[]", style=solid]; +"1748 6495" -> "3186 6506" [label="[]", style=solid]; +"1749 6496" -> "1750 6501" [label="[]", style=solid]; +"1750 6501" -> 
"1751 6503" [label="[-1, 3]", style=dashed]; +"1751 6503" -> "1752 6504" [label="[-1, 1]", style=dashed]; +"1752 6504" -> "1753 6508" [label="[-1]", style=dashed]; +"1752 6504" -> "3185 6505" [label="[-1]", style=dashed]; +"1753 6508" -> "3176 6520" [label="[]", style=solid]; +"1754 6434" -> "1755 6436" [label="[]", style=dashed]; +"1755 6436" -> "1756 6437" [label="[]", style=dashed]; +"1756 6437" -> "1757 6438" [label="[]", style=solid]; +"1757 6438" -> "1758 6439" [label="[-1, -1]", style=dashed]; +"1758 6439" -> "1759 6440" [label="[-1, -1]", style=dashed]; +"1759 6440" -> "1760 6443" [label="[-1]", style=dashed]; +"1759 6440" -> "1766 6451" [label="[-1]", style=dashed]; +"1760 6443" -> "1762 6444" [label="[-1]", style=dashed]; +"1761 6442" -> "1762 6444" [label="[]", style=solid]; +"1762 6444" -> "1763 6453" [label="[]", style=solid]; +"1762 6444" -> "1771 6464" [label="[]", style=solid]; +"1763 6453" -> "1764 6454" [label="[]", style=solid]; +"1764 6454" -> "1768 6457" [label="[]", style=solid]; +"1765 6449" -> "1766 6451" [label="[]", style=solid]; +"1766 6451" -> "1767 6452" [label="[]", style=solid]; +"1766 6451" -> "3188 6462" [label="[]", style=solid]; +"1767 6452" -> "1768 6457" [label="[]", style=solid]; +"1768 6457" -> "1769 6459" [label="[-1, 3]", style=dashed]; +"1769 6459" -> "1770 6460" [label="[-1, 1]", style=dashed]; +"1770 6460" -> "1771 6464" [label="[-1]", style=dashed]; +"1770 6460" -> "3187 6461" [label="[-1]", style=dashed]; +"1771 6464" -> "3176 6520" [label="[]", style=solid]; +"1772 6390" -> "1773 6392" [label="[]", style=dashed]; +"1773 6392" -> "1774 6393" [label="[]", style=dashed]; +"1774 6393" -> "1775 6394" [label="[]", style=solid]; +"1775 6394" -> "1776 6395" [label="[-1, -1]", style=dashed]; +"1776 6395" -> "1777 6396" [label="[-1, -1]", style=dashed]; +"1777 6396" -> "1778 6399" [label="[-1]", style=dashed]; +"1777 6396" -> "1784 6407" [label="[-1]", style=dashed]; +"1778 6399" -> "1780 6400" [label="[-1]", style=dashed]; +"1779 6398" -> "1780 6400" [label="[]", style=solid]; +"1780 6400" -> "1781 6409" [label="[]", style=solid]; +"1780 6400" -> "1789 6420" [label="[]", style=solid]; +"1781 6409" -> "1782 6410" [label="[]", style=solid]; +"1782 6410" -> "1786 6413" [label="[]", style=solid]; +"1783 6405" -> "1784 6407" [label="[]", style=solid]; +"1784 6407" -> "1785 6408" [label="[]", style=solid]; +"1784 6407" -> "3190 6418" [label="[]", style=solid]; +"1785 6408" -> "1786 6413" [label="[]", style=solid]; +"1786 6413" -> "1787 6415" [label="[-1, 3]", style=dashed]; +"1787 6415" -> "1788 6416" [label="[-1, 1]", style=dashed]; +"1788 6416" -> "1789 6420" [label="[-1]", style=dashed]; +"1788 6416" -> "3189 6417" [label="[-1]", style=dashed]; +"1789 6420" -> "3176 6520" [label="[]", style=solid]; +"1790 6346" -> "1791 6348" [label="[]", style=dashed]; +"1791 6348" -> "1792 6349" [label="[]", style=dashed]; +"1792 6349" -> "1793 6350" [label="[]", style=solid]; +"1793 6350" -> "1794 6351" [label="[-1, -1]", style=dashed]; +"1794 6351" -> "1795 6352" [label="[-1, -1]", style=dashed]; +"1795 6352" -> "1796 6355" [label="[-1]", style=dashed]; +"1795 6352" -> "1802 6363" [label="[-1]", style=dashed]; +"1796 6355" -> "1798 6356" [label="[-1]", style=dashed]; +"1797 6354" -> "1798 6356" [label="[]", style=solid]; +"1798 6356" -> "1799 6365" [label="[]", style=solid]; +"1798 6356" -> "1807 6376" [label="[]", style=solid]; +"1799 6365" -> "1800 6366" [label="[]", style=solid]; +"1800 6366" -> "1804 6369" [label="[]", style=solid]; +"1801 6361" -> "1802 6363" 
[label="[]", style=solid]; +"1802 6363" -> "1803 6364" [label="[]", style=solid]; +"1802 6363" -> "3192 6374" [label="[]", style=solid]; +"1803 6364" -> "1804 6369" [label="[]", style=solid]; +"1804 6369" -> "1805 6371" [label="[-1, 3]", style=dashed]; +"1805 6371" -> "1806 6372" [label="[-1, 1]", style=dashed]; +"1806 6372" -> "1807 6376" [label="[-1]", style=dashed]; +"1806 6372" -> "3191 6373" [label="[-1]", style=dashed]; +"1807 6376" -> "3176 6520" [label="[]", style=solid]; +"1808 6302" -> "1809 6304" [label="[]", style=dashed]; +"1809 6304" -> "1810 6305" [label="[]", style=dashed]; +"1810 6305" -> "1811 6306" [label="[]", style=solid]; +"1811 6306" -> "1812 6307" [label="[-1, -1]", style=dashed]; +"1812 6307" -> "1813 6308" [label="[-1, -1]", style=dashed]; +"1813 6308" -> "1814 6311" [label="[-1]", style=dashed]; +"1813 6308" -> "1820 6319" [label="[-1]", style=dashed]; +"1814 6311" -> "1816 6312" [label="[-1]", style=dashed]; +"1815 6310" -> "1816 6312" [label="[]", style=solid]; +"1816 6312" -> "1817 6321" [label="[]", style=solid]; +"1816 6312" -> "1825 6332" [label="[]", style=solid]; +"1817 6321" -> "1818 6322" [label="[]", style=solid]; +"1818 6322" -> "1822 6325" [label="[]", style=solid]; +"1819 6317" -> "1820 6319" [label="[]", style=solid]; +"1820 6319" -> "1821 6320" [label="[]", style=solid]; +"1820 6319" -> "3194 6330" [label="[]", style=solid]; +"1821 6320" -> "1822 6325" [label="[]", style=solid]; +"1822 6325" -> "1823 6327" [label="[-1, 3]", style=dashed]; +"1823 6327" -> "1824 6328" [label="[-1, 1]", style=dashed]; +"1824 6328" -> "1825 6332" [label="[-1]", style=dashed]; +"1824 6328" -> "3193 6329" [label="[-1]", style=dashed]; +"1825 6332" -> "3176 6520" [label="[]", style=solid]; +"1826 6258" -> "1827 6260" [label="[]", style=dashed]; +"1827 6260" -> "1828 6261" [label="[]", style=dashed]; +"1828 6261" -> "1829 6262" [label="[]", style=solid]; +"1829 6262" -> "1830 6263" [label="[-1, -1]", style=dashed]; +"1830 6263" -> "1831 6264" [label="[-1, -1]", style=dashed]; +"1831 6264" -> "1832 6267" [label="[-1]", style=dashed]; +"1831 6264" -> "1838 6275" [label="[-1]", style=dashed]; +"1832 6267" -> "1834 6268" [label="[-1]", style=dashed]; +"1833 6266" -> "1834 6268" [label="[]", style=solid]; +"1834 6268" -> "1835 6277" [label="[]", style=solid]; +"1834 6268" -> "1843 6288" [label="[]", style=solid]; +"1835 6277" -> "1836 6278" [label="[]", style=solid]; +"1836 6278" -> "1840 6281" [label="[]", style=solid]; +"1837 6273" -> "1838 6275" [label="[]", style=solid]; +"1838 6275" -> "1839 6276" [label="[]", style=solid]; +"1838 6275" -> "3196 6286" [label="[]", style=solid]; +"1839 6276" -> "1840 6281" [label="[]", style=solid]; +"1840 6281" -> "1841 6283" [label="[-1, 3]", style=dashed]; +"1841 6283" -> "1842 6284" [label="[-1, 1]", style=dashed]; +"1842 6284" -> "1843 6288" [label="[-1]", style=dashed]; +"1842 6284" -> "3195 6285" [label="[-1]", style=dashed]; +"1843 6288" -> "3176 6520" [label="[]", style=solid]; +"1844 6214" -> "1845 6216" [label="[]", style=dashed]; +"1845 6216" -> "1846 6217" [label="[]", style=dashed]; +"1846 6217" -> "1847 6218" [label="[]", style=solid]; +"1847 6218" -> "1848 6219" [label="[-1, -1]", style=dashed]; +"1848 6219" -> "1849 6220" [label="[-1, -1]", style=dashed]; +"1849 6220" -> "1850 6223" [label="[-1]", style=dashed]; +"1849 6220" -> "1856 6231" [label="[-1]", style=dashed]; +"1850 6223" -> "1852 6224" [label="[-1]", style=dashed]; +"1851 6222" -> "1852 6224" [label="[]", style=solid]; +"1852 6224" -> "1853 6233" [label="[]", 
style=solid]; +"1852 6224" -> "1861 6244" [label="[]", style=solid]; +"1853 6233" -> "1854 6234" [label="[]", style=solid]; +"1854 6234" -> "1858 6237" [label="[]", style=solid]; +"1855 6229" -> "1856 6231" [label="[]", style=solid]; +"1856 6231" -> "1857 6232" [label="[]", style=solid]; +"1856 6231" -> "3198 6242" [label="[]", style=solid]; +"1857 6232" -> "1858 6237" [label="[]", style=solid]; +"1858 6237" -> "1859 6239" [label="[-1, 3]", style=dashed]; +"1859 6239" -> "1860 6240" [label="[-1, 1]", style=dashed]; +"1860 6240" -> "1861 6244" [label="[-1]", style=dashed]; +"1860 6240" -> "3197 6241" [label="[-1]", style=dashed]; +"1861 6244" -> "3176 6520" [label="[]", style=solid]; +"1862 6170" -> "1863 6172" [label="[]", style=dashed]; +"1863 6172" -> "1864 6173" [label="[]", style=dashed]; +"1864 6173" -> "1865 6174" [label="[]", style=solid]; +"1865 6174" -> "1866 6175" [label="[-1, -1]", style=dashed]; +"1866 6175" -> "1867 6176" [label="[-1, -1]", style=dashed]; +"1867 6176" -> "1868 6179" [label="[-1]", style=dashed]; +"1867 6176" -> "1874 6187" [label="[-1]", style=dashed]; +"1868 6179" -> "1870 6180" [label="[-1]", style=dashed]; +"1869 6178" -> "1870 6180" [label="[]", style=solid]; +"1870 6180" -> "1871 6189" [label="[]", style=solid]; +"1870 6180" -> "1879 6200" [label="[]", style=solid]; +"1871 6189" -> "1872 6190" [label="[]", style=solid]; +"1872 6190" -> "1876 6193" [label="[]", style=solid]; +"1873 6185" -> "1874 6187" [label="[]", style=solid]; +"1874 6187" -> "1875 6188" [label="[]", style=solid]; +"1874 6187" -> "3200 6198" [label="[]", style=solid]; +"1875 6188" -> "1876 6193" [label="[]", style=solid]; +"1876 6193" -> "1877 6195" [label="[-1, 3]", style=dashed]; +"1877 6195" -> "1878 6196" [label="[-1, 1]", style=dashed]; +"1878 6196" -> "1879 6200" [label="[-1]", style=dashed]; +"1878 6196" -> "3199 6197" [label="[-1]", style=dashed]; +"1879 6200" -> "3176 6520" [label="[]", style=solid]; +"1880 6126" -> "1881 6128" [label="[]", style=dashed]; +"1881 6128" -> "1882 6129" [label="[]", style=dashed]; +"1882 6129" -> "1883 6130" [label="[]", style=solid]; +"1883 6130" -> "1884 6131" [label="[-1, -1]", style=dashed]; +"1884 6131" -> "1885 6132" [label="[-1, -1]", style=dashed]; +"1885 6132" -> "1886 6135" [label="[-1]", style=dashed]; +"1885 6132" -> "1892 6143" [label="[-1]", style=dashed]; +"1886 6135" -> "1888 6136" [label="[-1]", style=dashed]; +"1887 6134" -> "1888 6136" [label="[]", style=solid]; +"1888 6136" -> "1889 6145" [label="[]", style=solid]; +"1888 6136" -> "1897 6156" [label="[]", style=solid]; +"1889 6145" -> "1890 6146" [label="[]", style=solid]; +"1890 6146" -> "1894 6149" [label="[]", style=solid]; +"1891 6141" -> "1892 6143" [label="[]", style=solid]; +"1892 6143" -> "1893 6144" [label="[]", style=solid]; +"1892 6143" -> "3202 6154" [label="[]", style=solid]; +"1893 6144" -> "1894 6149" [label="[]", style=solid]; +"1894 6149" -> "1895 6151" [label="[-1, 3]", style=dashed]; +"1895 6151" -> "1896 6152" [label="[-1, 1]", style=dashed]; +"1896 6152" -> "1897 6156" [label="[-1]", style=dashed]; +"1896 6152" -> "3201 6153" [label="[-1]", style=dashed]; +"1897 6156" -> "3176 6520" [label="[]", style=solid]; +"1898 6082" -> "1899 6084" [label="[]", style=dashed]; +"1899 6084" -> "1900 6085" [label="[]", style=dashed]; +"1900 6085" -> "1901 6086" [label="[]", style=solid]; +"1901 6086" -> "1902 6087" [label="[-1, -1]", style=dashed]; +"1902 6087" -> "1903 6088" [label="[-1, -1]", style=dashed]; +"1903 6088" -> "1904 6091" [label="[-1]", style=dashed]; +"1903 
6088" -> "1910 6099" [label="[-1]", style=dashed]; +"1904 6091" -> "1906 6092" [label="[-1]", style=dashed]; +"1905 6090" -> "1906 6092" [label="[]", style=solid]; +"1906 6092" -> "1907 6101" [label="[]", style=solid]; +"1906 6092" -> "1915 6112" [label="[]", style=solid]; +"1907 6101" -> "1908 6102" [label="[]", style=solid]; +"1908 6102" -> "1912 6105" [label="[]", style=solid]; +"1909 6097" -> "1910 6099" [label="[]", style=solid]; +"1910 6099" -> "1911 6100" [label="[]", style=solid]; +"1910 6099" -> "3204 6110" [label="[]", style=solid]; +"1911 6100" -> "1912 6105" [label="[]", style=solid]; +"1912 6105" -> "1913 6107" [label="[-1, 3]", style=dashed]; +"1913 6107" -> "1914 6108" [label="[-1, 1]", style=dashed]; +"1914 6108" -> "1915 6112" [label="[-1]", style=dashed]; +"1914 6108" -> "3203 6109" [label="[-1]", style=dashed]; +"1915 6112" -> "3176 6520" [label="[]", style=solid]; +"1916 6038" -> "1917 6040" [label="[]", style=dashed]; +"1917 6040" -> "1918 6041" [label="[]", style=dashed]; +"1918 6041" -> "1919 6042" [label="[]", style=solid]; +"1919 6042" -> "1920 6043" [label="[-1, -1]", style=dashed]; +"1920 6043" -> "1921 6044" [label="[-1, -1]", style=dashed]; +"1921 6044" -> "1922 6047" [label="[-1]", style=dashed]; +"1921 6044" -> "1928 6055" [label="[-1]", style=dashed]; +"1922 6047" -> "1924 6048" [label="[-1]", style=dashed]; +"1923 6046" -> "1924 6048" [label="[]", style=solid]; +"1924 6048" -> "1925 6057" [label="[]", style=solid]; +"1924 6048" -> "1933 6068" [label="[]", style=solid]; +"1925 6057" -> "1926 6058" [label="[]", style=solid]; +"1926 6058" -> "1930 6061" [label="[]", style=solid]; +"1927 6053" -> "1928 6055" [label="[]", style=solid]; +"1928 6055" -> "1929 6056" [label="[]", style=solid]; +"1928 6055" -> "3206 6066" [label="[]", style=solid]; +"1929 6056" -> "1930 6061" [label="[]", style=solid]; +"1930 6061" -> "1931 6063" [label="[-1, 3]", style=dashed]; +"1931 6063" -> "1932 6064" [label="[-1, 1]", style=dashed]; +"1932 6064" -> "1933 6068" [label="[-1]", style=dashed]; +"1932 6064" -> "3205 6065" [label="[-1]", style=dashed]; +"1933 6068" -> "3176 6520" [label="[]", style=solid]; +"1934 5994" -> "1935 5996" [label="[]", style=dashed]; +"1935 5996" -> "1936 5997" [label="[]", style=dashed]; +"1936 5997" -> "1937 5998" [label="[]", style=solid]; +"1937 5998" -> "1938 5999" [label="[-1, -1]", style=dashed]; +"1938 5999" -> "1939 6000" [label="[-1, -1]", style=dashed]; +"1939 6000" -> "1940 6003" [label="[-1]", style=dashed]; +"1939 6000" -> "1946 6011" [label="[-1]", style=dashed]; +"1940 6003" -> "1942 6004" [label="[-1]", style=dashed]; +"1941 6002" -> "1942 6004" [label="[]", style=solid]; +"1942 6004" -> "1943 6013" [label="[]", style=solid]; +"1942 6004" -> "1951 6024" [label="[]", style=solid]; +"1943 6013" -> "1944 6014" [label="[]", style=solid]; +"1944 6014" -> "1948 6017" [label="[]", style=solid]; +"1945 6009" -> "1946 6011" [label="[]", style=solid]; +"1946 6011" -> "1947 6012" [label="[]", style=solid]; +"1946 6011" -> "3208 6022" [label="[]", style=solid]; +"1947 6012" -> "1948 6017" [label="[]", style=solid]; +"1948 6017" -> "1949 6019" [label="[-1, 3]", style=dashed]; +"1949 6019" -> "1950 6020" [label="[-1, 1]", style=dashed]; +"1950 6020" -> "1951 6024" [label="[-1]", style=dashed]; +"1950 6020" -> "3207 6021" [label="[-1]", style=dashed]; +"1951 6024" -> "3176 6520" [label="[]", style=solid]; +"1952 5950" -> "1953 5952" [label="[]", style=dashed]; +"1953 5952" -> "1954 5953" [label="[]", style=dashed]; +"1954 5953" -> "1955 5954" 
[label="[]", style=solid]; +"1955 5954" -> "1956 5955" [label="[-1, -1]", style=dashed]; +"1956 5955" -> "1957 5956" [label="[-1, -1]", style=dashed]; +"1957 5956" -> "1958 5959" [label="[-1]", style=dashed]; +"1957 5956" -> "1964 5967" [label="[-1]", style=dashed]; +"1958 5959" -> "1960 5960" [label="[-1]", style=dashed]; +"1959 5958" -> "1960 5960" [label="[]", style=solid]; +"1960 5960" -> "1961 5969" [label="[]", style=solid]; +"1960 5960" -> "1969 5980" [label="[]", style=solid]; +"1961 5969" -> "1962 5970" [label="[]", style=solid]; +"1962 5970" -> "1966 5973" [label="[]", style=solid]; +"1963 5965" -> "1964 5967" [label="[]", style=solid]; +"1964 5967" -> "1965 5968" [label="[]", style=solid]; +"1964 5967" -> "3210 5978" [label="[]", style=solid]; +"1965 5968" -> "1966 5973" [label="[]", style=solid]; +"1966 5973" -> "1967 5975" [label="[-1, 3]", style=dashed]; +"1967 5975" -> "1968 5976" [label="[-1, 1]", style=dashed]; +"1968 5976" -> "1969 5980" [label="[-1]", style=dashed]; +"1968 5976" -> "3209 5977" [label="[-1]", style=dashed]; +"1969 5980" -> "3176 6520" [label="[]", style=solid]; +"1970 5906" -> "1971 5908" [label="[]", style=dashed]; +"1971 5908" -> "1972 5909" [label="[]", style=dashed]; +"1972 5909" -> "1973 5910" [label="[]", style=solid]; +"1973 5910" -> "1974 5911" [label="[-1, -1]", style=dashed]; +"1974 5911" -> "1975 5912" [label="[-1, -1]", style=dashed]; +"1975 5912" -> "1976 5915" [label="[-1]", style=dashed]; +"1975 5912" -> "1982 5923" [label="[-1]", style=dashed]; +"1976 5915" -> "1978 5916" [label="[-1]", style=dashed]; +"1977 5914" -> "1978 5916" [label="[]", style=solid]; +"1978 5916" -> "1979 5925" [label="[]", style=solid]; +"1978 5916" -> "1987 5936" [label="[]", style=solid]; +"1979 5925" -> "1980 5926" [label="[]", style=solid]; +"1980 5926" -> "1984 5929" [label="[]", style=solid]; +"1981 5921" -> "1982 5923" [label="[]", style=solid]; +"1982 5923" -> "1983 5924" [label="[]", style=solid]; +"1982 5923" -> "3212 5934" [label="[]", style=solid]; +"1983 5924" -> "1984 5929" [label="[]", style=solid]; +"1984 5929" -> "1985 5931" [label="[-1, 3]", style=dashed]; +"1985 5931" -> "1986 5932" [label="[-1, 1]", style=dashed]; +"1986 5932" -> "1987 5936" [label="[-1]", style=dashed]; +"1986 5932" -> "3211 5933" [label="[-1]", style=dashed]; +"1987 5936" -> "3176 6520" [label="[]", style=solid]; +"1988 5862" -> "1989 5864" [label="[]", style=dashed]; +"1989 5864" -> "1990 5865" [label="[]", style=dashed]; +"1990 5865" -> "1991 5866" [label="[]", style=solid]; +"1991 5866" -> "1992 5867" [label="[-1, -1]", style=dashed]; +"1992 5867" -> "1993 5868" [label="[-1, -1]", style=dashed]; +"1993 5868" -> "1994 5871" [label="[-1]", style=dashed]; +"1993 5868" -> "2000 5879" [label="[-1]", style=dashed]; +"1994 5871" -> "1996 5872" [label="[-1]", style=dashed]; +"1995 5870" -> "1996 5872" [label="[]", style=solid]; +"1996 5872" -> "1997 5881" [label="[]", style=solid]; +"1996 5872" -> "2005 5892" [label="[]", style=solid]; +"1997 5881" -> "1998 5882" [label="[]", style=solid]; +"1998 5882" -> "2002 5885" [label="[]", style=solid]; +"1999 5877" -> "2000 5879" [label="[]", style=solid]; +"2000 5879" -> "2001 5880" [label="[]", style=solid]; +"2000 5879" -> "3214 5890" [label="[]", style=solid]; +"2001 5880" -> "2002 5885" [label="[]", style=solid]; +"2002 5885" -> "2003 5887" [label="[-1, 3]", style=dashed]; +"2003 5887" -> "2004 5888" [label="[-1, 1]", style=dashed]; +"2004 5888" -> "2005 5892" [label="[-1]", style=dashed]; +"2004 5888" -> "3213 5889" [label="[-1]", 
style=dashed]; +"2005 5892" -> "3176 6520" [label="[]", style=solid]; +"2006 5818" -> "2007 5820" [label="[]", style=dashed]; +"2007 5820" -> "2008 5821" [label="[]", style=dashed]; +"2008 5821" -> "2009 5822" [label="[]", style=solid]; +"2009 5822" -> "2010 5823" [label="[-1, -1]", style=dashed]; +"2010 5823" -> "2011 5824" [label="[-1, -1]", style=dashed]; +"2011 5824" -> "2012 5827" [label="[-1]", style=dashed]; +"2011 5824" -> "2018 5835" [label="[-1]", style=dashed]; +"2012 5827" -> "2014 5828" [label="[-1]", style=dashed]; +"2013 5826" -> "2014 5828" [label="[]", style=solid]; +"2014 5828" -> "2015 5837" [label="[]", style=solid]; +"2014 5828" -> "2023 5848" [label="[]", style=solid]; +"2015 5837" -> "2016 5838" [label="[]", style=solid]; +"2016 5838" -> "2020 5841" [label="[]", style=solid]; +"2017 5833" -> "2018 5835" [label="[]", style=solid]; +"2018 5835" -> "2019 5836" [label="[]", style=solid]; +"2018 5835" -> "3216 5846" [label="[]", style=solid]; +"2019 5836" -> "2020 5841" [label="[]", style=solid]; +"2020 5841" -> "2021 5843" [label="[-1, 3]", style=dashed]; +"2021 5843" -> "2022 5844" [label="[-1, 1]", style=dashed]; +"2022 5844" -> "2023 5848" [label="[-1]", style=dashed]; +"2022 5844" -> "3215 5845" [label="[-1]", style=dashed]; +"2023 5848" -> "3176 6520" [label="[]", style=solid]; +"2024 5774" -> "2025 5776" [label="[]", style=dashed]; +"2025 5776" -> "2026 5777" [label="[]", style=dashed]; +"2026 5777" -> "2027 5778" [label="[]", style=solid]; +"2027 5778" -> "2028 5779" [label="[-1, -1]", style=dashed]; +"2028 5779" -> "2029 5780" [label="[-1, -1]", style=dashed]; +"2029 5780" -> "2030 5783" [label="[-1]", style=dashed]; +"2029 5780" -> "2036 5791" [label="[-1]", style=dashed]; +"2030 5783" -> "2032 5784" [label="[-1]", style=dashed]; +"2031 5782" -> "2032 5784" [label="[]", style=solid]; +"2032 5784" -> "2033 5793" [label="[]", style=solid]; +"2032 5784" -> "2041 5804" [label="[]", style=solid]; +"2033 5793" -> "2034 5794" [label="[]", style=solid]; +"2034 5794" -> "2038 5797" [label="[]", style=solid]; +"2035 5789" -> "2036 5791" [label="[]", style=solid]; +"2036 5791" -> "2037 5792" [label="[]", style=solid]; +"2036 5791" -> "3218 5802" [label="[]", style=solid]; +"2037 5792" -> "2038 5797" [label="[]", style=solid]; +"2038 5797" -> "2039 5799" [label="[-1, 3]", style=dashed]; +"2039 5799" -> "2040 5800" [label="[-1, 1]", style=dashed]; +"2040 5800" -> "2041 5804" [label="[-1]", style=dashed]; +"2040 5800" -> "3217 5801" [label="[-1]", style=dashed]; +"2041 5804" -> "3176 6520" [label="[]", style=solid]; +"2042 5730" -> "2043 5732" [label="[]", style=dashed]; +"2043 5732" -> "2044 5733" [label="[]", style=dashed]; +"2044 5733" -> "2045 5734" [label="[]", style=solid]; +"2045 5734" -> "2046 5735" [label="[-1, -1]", style=dashed]; +"2046 5735" -> "2047 5736" [label="[-1, -1]", style=dashed]; +"2047 5736" -> "2048 5739" [label="[-1]", style=dashed]; +"2047 5736" -> "2054 5747" [label="[-1]", style=dashed]; +"2048 5739" -> "2050 5740" [label="[-1]", style=dashed]; +"2049 5738" -> "2050 5740" [label="[]", style=solid]; +"2050 5740" -> "2051 5749" [label="[]", style=solid]; +"2050 5740" -> "2059 5760" [label="[]", style=solid]; +"2051 5749" -> "2052 5750" [label="[]", style=solid]; +"2052 5750" -> "2056 5753" [label="[]", style=solid]; +"2053 5745" -> "2054 5747" [label="[]", style=solid]; +"2054 5747" -> "2055 5748" [label="[]", style=solid]; +"2054 5747" -> "3220 5758" [label="[]", style=solid]; +"2055 5748" -> "2056 5753" [label="[]", style=solid]; +"2056 5753" -> 
"2057 5755" [label="[-1, 3]", style=dashed]; +"2057 5755" -> "2058 5756" [label="[-1, 1]", style=dashed]; +"2058 5756" -> "2059 5760" [label="[-1]", style=dashed]; +"2058 5756" -> "3219 5757" [label="[-1]", style=dashed]; +"2059 5760" -> "3176 6520" [label="[]", style=solid]; +"2060 5686" -> "2061 5688" [label="[]", style=dashed]; +"2061 5688" -> "2062 5689" [label="[]", style=dashed]; +"2062 5689" -> "2063 5690" [label="[]", style=solid]; +"2063 5690" -> "2064 5691" [label="[-1, -1]", style=dashed]; +"2064 5691" -> "2065 5692" [label="[-1, -1]", style=dashed]; +"2065 5692" -> "2066 5695" [label="[-1]", style=dashed]; +"2065 5692" -> "2072 5703" [label="[-1]", style=dashed]; +"2066 5695" -> "2068 5696" [label="[-1]", style=dashed]; +"2067 5694" -> "2068 5696" [label="[]", style=solid]; +"2068 5696" -> "2069 5705" [label="[]", style=solid]; +"2068 5696" -> "2077 5716" [label="[]", style=solid]; +"2069 5705" -> "2070 5706" [label="[]", style=solid]; +"2070 5706" -> "2074 5709" [label="[]", style=solid]; +"2071 5701" -> "2072 5703" [label="[]", style=solid]; +"2072 5703" -> "2073 5704" [label="[]", style=solid]; +"2072 5703" -> "3222 5714" [label="[]", style=solid]; +"2073 5704" -> "2074 5709" [label="[]", style=solid]; +"2074 5709" -> "2075 5711" [label="[-1, 3]", style=dashed]; +"2075 5711" -> "2076 5712" [label="[-1, 1]", style=dashed]; +"2076 5712" -> "2077 5716" [label="[-1]", style=dashed]; +"2076 5712" -> "3221 5713" [label="[-1]", style=dashed]; +"2077 5716" -> "3176 6520" [label="[]", style=solid]; +"2078 5642" -> "2079 5644" [label="[]", style=dashed]; +"2079 5644" -> "2080 5645" [label="[]", style=dashed]; +"2080 5645" -> "2081 5646" [label="[]", style=solid]; +"2081 5646" -> "2082 5647" [label="[-1, -1]", style=dashed]; +"2082 5647" -> "2083 5648" [label="[-1, -1]", style=dashed]; +"2083 5648" -> "2084 5651" [label="[-1]", style=dashed]; +"2083 5648" -> "2090 5659" [label="[-1]", style=dashed]; +"2084 5651" -> "2086 5652" [label="[-1]", style=dashed]; +"2085 5650" -> "2086 5652" [label="[]", style=solid]; +"2086 5652" -> "2087 5661" [label="[]", style=solid]; +"2086 5652" -> "2095 5672" [label="[]", style=solid]; +"2087 5661" -> "2088 5662" [label="[]", style=solid]; +"2088 5662" -> "2092 5665" [label="[]", style=solid]; +"2089 5657" -> "2090 5659" [label="[]", style=solid]; +"2090 5659" -> "2091 5660" [label="[]", style=solid]; +"2090 5659" -> "3224 5670" [label="[]", style=solid]; +"2091 5660" -> "2092 5665" [label="[]", style=solid]; +"2092 5665" -> "2093 5667" [label="[-1, 3]", style=dashed]; +"2093 5667" -> "2094 5668" [label="[-1, 1]", style=dashed]; +"2094 5668" -> "2095 5672" [label="[-1]", style=dashed]; +"2094 5668" -> "3223 5669" [label="[-1]", style=dashed]; +"2095 5672" -> "3176 6520" [label="[]", style=solid]; +"2096 5598" -> "2097 5600" [label="[]", style=dashed]; +"2097 5600" -> "2098 5601" [label="[]", style=dashed]; +"2098 5601" -> "2099 5602" [label="[]", style=solid]; +"2099 5602" -> "2100 5603" [label="[-1, -1]", style=dashed]; +"2100 5603" -> "2101 5604" [label="[-1, -1]", style=dashed]; +"2101 5604" -> "2102 5607" [label="[-1]", style=dashed]; +"2101 5604" -> "2108 5615" [label="[-1]", style=dashed]; +"2102 5607" -> "2104 5608" [label="[-1]", style=dashed]; +"2103 5606" -> "2104 5608" [label="[]", style=solid]; +"2104 5608" -> "2105 5617" [label="[]", style=solid]; +"2104 5608" -> "2113 5628" [label="[]", style=solid]; +"2105 5617" -> "2106 5618" [label="[]", style=solid]; +"2106 5618" -> "2110 5621" [label="[]", style=solid]; +"2107 5613" -> "2108 5615" 
[label="[]", style=solid]; +"2108 5615" -> "2109 5616" [label="[]", style=solid]; +"2108 5615" -> "3226 5626" [label="[]", style=solid]; +"2109 5616" -> "2110 5621" [label="[]", style=solid]; +"2110 5621" -> "2111 5623" [label="[-1, 3]", style=dashed]; +"2111 5623" -> "2112 5624" [label="[-1, 1]", style=dashed]; +"2112 5624" -> "2113 5628" [label="[-1]", style=dashed]; +"2112 5624" -> "3225 5625" [label="[-1]", style=dashed]; +"2113 5628" -> "3176 6520" [label="[]", style=solid]; +"2114 5554" -> "2115 5556" [label="[]", style=dashed]; +"2115 5556" -> "2116 5557" [label="[]", style=dashed]; +"2116 5557" -> "2117 5558" [label="[]", style=solid]; +"2117 5558" -> "2118 5559" [label="[-1, -1]", style=dashed]; +"2118 5559" -> "2119 5560" [label="[-1, -1]", style=dashed]; +"2119 5560" -> "2120 5563" [label="[-1]", style=dashed]; +"2119 5560" -> "2126 5571" [label="[-1]", style=dashed]; +"2120 5563" -> "2122 5564" [label="[-1]", style=dashed]; +"2121 5562" -> "2122 5564" [label="[]", style=solid]; +"2122 5564" -> "2123 5573" [label="[]", style=solid]; +"2122 5564" -> "2131 5584" [label="[]", style=solid]; +"2123 5573" -> "2124 5574" [label="[]", style=solid]; +"2124 5574" -> "2128 5577" [label="[]", style=solid]; +"2125 5569" -> "2126 5571" [label="[]", style=solid]; +"2126 5571" -> "2127 5572" [label="[]", style=solid]; +"2126 5571" -> "3228 5582" [label="[]", style=solid]; +"2127 5572" -> "2128 5577" [label="[]", style=solid]; +"2128 5577" -> "2129 5579" [label="[-1, 3]", style=dashed]; +"2129 5579" -> "2130 5580" [label="[-1, 1]", style=dashed]; +"2130 5580" -> "2131 5584" [label="[-1]", style=dashed]; +"2130 5580" -> "3227 5581" [label="[-1]", style=dashed]; +"2131 5584" -> "3176 6520" [label="[]", style=solid]; +"2132 5510" -> "2133 5512" [label="[]", style=dashed]; +"2133 5512" -> "2134 5513" [label="[]", style=dashed]; +"2134 5513" -> "2135 5514" [label="[]", style=solid]; +"2135 5514" -> "2136 5515" [label="[-1, -1]", style=dashed]; +"2136 5515" -> "2137 5516" [label="[-1, -1]", style=dashed]; +"2137 5516" -> "2138 5519" [label="[-1]", style=dashed]; +"2137 5516" -> "2144 5527" [label="[-1]", style=dashed]; +"2138 5519" -> "2140 5520" [label="[-1]", style=dashed]; +"2139 5518" -> "2140 5520" [label="[]", style=solid]; +"2140 5520" -> "2141 5529" [label="[]", style=solid]; +"2140 5520" -> "2149 5540" [label="[]", style=solid]; +"2141 5529" -> "2142 5530" [label="[]", style=solid]; +"2142 5530" -> "2146 5533" [label="[]", style=solid]; +"2143 5525" -> "2144 5527" [label="[]", style=solid]; +"2144 5527" -> "2145 5528" [label="[]", style=solid]; +"2144 5527" -> "3230 5538" [label="[]", style=solid]; +"2145 5528" -> "2146 5533" [label="[]", style=solid]; +"2146 5533" -> "2147 5535" [label="[-1, 3]", style=dashed]; +"2147 5535" -> "2148 5536" [label="[-1, 1]", style=dashed]; +"2148 5536" -> "2149 5540" [label="[-1]", style=dashed]; +"2148 5536" -> "3229 5537" [label="[-1]", style=dashed]; +"2149 5540" -> "3176 6520" [label="[]", style=solid]; +"2150 5466" -> "2151 5468" [label="[]", style=dashed]; +"2151 5468" -> "2152 5469" [label="[]", style=dashed]; +"2152 5469" -> "2153 5470" [label="[]", style=solid]; +"2153 5470" -> "2154 5471" [label="[-1, -1]", style=dashed]; +"2154 5471" -> "2155 5472" [label="[-1, -1]", style=dashed]; +"2155 5472" -> "2156 5475" [label="[-1]", style=dashed]; +"2155 5472" -> "2162 5483" [label="[-1]", style=dashed]; +"2156 5475" -> "2158 5476" [label="[-1]", style=dashed]; +"2157 5474" -> "2158 5476" [label="[]", style=solid]; +"2158 5476" -> "2159 5485" [label="[]", 
style=solid]; +"2158 5476" -> "2167 5496" [label="[]", style=solid]; +"2159 5485" -> "2160 5486" [label="[]", style=solid]; +"2160 5486" -> "2164 5489" [label="[]", style=solid]; +"2161 5481" -> "2162 5483" [label="[]", style=solid]; +"2162 5483" -> "2163 5484" [label="[]", style=solid]; +"2162 5483" -> "3232 5494" [label="[]", style=solid]; +"2163 5484" -> "2164 5489" [label="[]", style=solid]; +"2164 5489" -> "2165 5491" [label="[-1, 3]", style=dashed]; +"2165 5491" -> "2166 5492" [label="[-1, 1]", style=dashed]; +"2166 5492" -> "2167 5496" [label="[-1]", style=dashed]; +"2166 5492" -> "3231 5493" [label="[-1]", style=dashed]; +"2167 5496" -> "3176 6520" [label="[]", style=solid]; +"2168 5422" -> "2169 5424" [label="[]", style=dashed]; +"2169 5424" -> "2170 5425" [label="[]", style=dashed]; +"2170 5425" -> "2171 5426" [label="[]", style=solid]; +"2171 5426" -> "2172 5427" [label="[-1, -1]", style=dashed]; +"2172 5427" -> "2173 5428" [label="[-1, -1]", style=dashed]; +"2173 5428" -> "2174 5431" [label="[-1]", style=dashed]; +"2173 5428" -> "2180 5439" [label="[-1]", style=dashed]; +"2174 5431" -> "2176 5432" [label="[-1]", style=dashed]; +"2175 5430" -> "2176 5432" [label="[]", style=solid]; +"2176 5432" -> "2177 5441" [label="[]", style=solid]; +"2176 5432" -> "2185 5452" [label="[]", style=solid]; +"2177 5441" -> "2178 5442" [label="[]", style=solid]; +"2178 5442" -> "2182 5445" [label="[]", style=solid]; +"2179 5437" -> "2180 5439" [label="[]", style=solid]; +"2180 5439" -> "2181 5440" [label="[]", style=solid]; +"2180 5439" -> "3234 5450" [label="[]", style=solid]; +"2181 5440" -> "2182 5445" [label="[]", style=solid]; +"2182 5445" -> "2183 5447" [label="[-1, 3]", style=dashed]; +"2183 5447" -> "2184 5448" [label="[-1, 1]", style=dashed]; +"2184 5448" -> "2185 5452" [label="[-1]", style=dashed]; +"2184 5448" -> "3233 5449" [label="[-1]", style=dashed]; +"2185 5452" -> "3176 6520" [label="[]", style=solid]; +"2186 5378" -> "2187 5380" [label="[]", style=dashed]; +"2187 5380" -> "2188 5381" [label="[]", style=dashed]; +"2188 5381" -> "2189 5382" [label="[]", style=solid]; +"2189 5382" -> "2190 5383" [label="[-1, -1]", style=dashed]; +"2190 5383" -> "2191 5384" [label="[-1, -1]", style=dashed]; +"2191 5384" -> "2192 5387" [label="[-1]", style=dashed]; +"2191 5384" -> "2198 5395" [label="[-1]", style=dashed]; +"2192 5387" -> "2194 5388" [label="[-1]", style=dashed]; +"2193 5386" -> "2194 5388" [label="[]", style=solid]; +"2194 5388" -> "2195 5397" [label="[]", style=solid]; +"2194 5388" -> "2203 5408" [label="[]", style=solid]; +"2195 5397" -> "2196 5398" [label="[]", style=solid]; +"2196 5398" -> "2200 5401" [label="[]", style=solid]; +"2197 5393" -> "2198 5395" [label="[]", style=solid]; +"2198 5395" -> "2199 5396" [label="[]", style=solid]; +"2198 5395" -> "3236 5406" [label="[]", style=solid]; +"2199 5396" -> "2200 5401" [label="[]", style=solid]; +"2200 5401" -> "2201 5403" [label="[-1, 3]", style=dashed]; +"2201 5403" -> "2202 5404" [label="[-1, 1]", style=dashed]; +"2202 5404" -> "2203 5408" [label="[-1]", style=dashed]; +"2202 5404" -> "3235 5405" [label="[-1]", style=dashed]; +"2203 5408" -> "3176 6520" [label="[]", style=solid]; +"2204 5334" -> "2205 5336" [label="[]", style=dashed]; +"2205 5336" -> "2206 5337" [label="[]", style=dashed]; +"2206 5337" -> "2207 5338" [label="[]", style=solid]; +"2207 5338" -> "2208 5339" [label="[-1, -1]", style=dashed]; +"2208 5339" -> "2209 5340" [label="[-1, -1]", style=dashed]; +"2209 5340" -> "2210 5343" [label="[-1]", style=dashed]; +"2209 
5340" -> "2216 5351" [label="[-1]", style=dashed]; +"2210 5343" -> "2212 5344" [label="[-1]", style=dashed]; +"2211 5342" -> "2212 5344" [label="[]", style=solid]; +"2212 5344" -> "2213 5353" [label="[]", style=solid]; +"2212 5344" -> "2221 5364" [label="[]", style=solid]; +"2213 5353" -> "2214 5354" [label="[]", style=solid]; +"2214 5354" -> "2218 5357" [label="[]", style=solid]; +"2215 5349" -> "2216 5351" [label="[]", style=solid]; +"2216 5351" -> "2217 5352" [label="[]", style=solid]; +"2216 5351" -> "3238 5362" [label="[]", style=solid]; +"2217 5352" -> "2218 5357" [label="[]", style=solid]; +"2218 5357" -> "2219 5359" [label="[-1, 3]", style=dashed]; +"2219 5359" -> "2220 5360" [label="[-1, 1]", style=dashed]; +"2220 5360" -> "2221 5364" [label="[-1]", style=dashed]; +"2220 5360" -> "3237 5361" [label="[-1]", style=dashed]; +"2221 5364" -> "3176 6520" [label="[]", style=solid]; +"2222 5290" -> "2223 5292" [label="[]", style=dashed]; +"2223 5292" -> "2224 5293" [label="[]", style=dashed]; +"2224 5293" -> "2225 5294" [label="[]", style=solid]; +"2225 5294" -> "2226 5295" [label="[-1, -1]", style=dashed]; +"2226 5295" -> "2227 5296" [label="[-1, -1]", style=dashed]; +"2227 5296" -> "2228 5299" [label="[-1]", style=dashed]; +"2227 5296" -> "2234 5307" [label="[-1]", style=dashed]; +"2228 5299" -> "2230 5300" [label="[-1]", style=dashed]; +"2229 5298" -> "2230 5300" [label="[]", style=solid]; +"2230 5300" -> "2231 5309" [label="[]", style=solid]; +"2230 5300" -> "2239 5320" [label="[]", style=solid]; +"2231 5309" -> "2232 5310" [label="[]", style=solid]; +"2232 5310" -> "2236 5313" [label="[]", style=solid]; +"2233 5305" -> "2234 5307" [label="[]", style=solid]; +"2234 5307" -> "2235 5308" [label="[]", style=solid]; +"2234 5307" -> "3240 5318" [label="[]", style=solid]; +"2235 5308" -> "2236 5313" [label="[]", style=solid]; +"2236 5313" -> "2237 5315" [label="[-1, 3]", style=dashed]; +"2237 5315" -> "2238 5316" [label="[-1, 1]", style=dashed]; +"2238 5316" -> "2239 5320" [label="[-1]", style=dashed]; +"2238 5316" -> "3239 5317" [label="[-1]", style=dashed]; +"2239 5320" -> "3176 6520" [label="[]", style=solid]; +"2240 5246" -> "2241 5248" [label="[]", style=dashed]; +"2241 5248" -> "2242 5249" [label="[]", style=dashed]; +"2242 5249" -> "2243 5250" [label="[]", style=solid]; +"2243 5250" -> "2244 5251" [label="[-1, -1]", style=dashed]; +"2244 5251" -> "2245 5252" [label="[-1, -1]", style=dashed]; +"2245 5252" -> "2246 5255" [label="[-1]", style=dashed]; +"2245 5252" -> "2252 5263" [label="[-1]", style=dashed]; +"2246 5255" -> "2248 5256" [label="[-1]", style=dashed]; +"2247 5254" -> "2248 5256" [label="[]", style=solid]; +"2248 5256" -> "2249 5265" [label="[]", style=solid]; +"2248 5256" -> "2257 5276" [label="[]", style=solid]; +"2249 5265" -> "2250 5266" [label="[]", style=solid]; +"2250 5266" -> "2254 5269" [label="[]", style=solid]; +"2251 5261" -> "2252 5263" [label="[]", style=solid]; +"2252 5263" -> "2253 5264" [label="[]", style=solid]; +"2252 5263" -> "3242 5274" [label="[]", style=solid]; +"2253 5264" -> "2254 5269" [label="[]", style=solid]; +"2254 5269" -> "2255 5271" [label="[-1, 3]", style=dashed]; +"2255 5271" -> "2256 5272" [label="[-1, 1]", style=dashed]; +"2256 5272" -> "2257 5276" [label="[-1]", style=dashed]; +"2256 5272" -> "3241 5273" [label="[-1]", style=dashed]; +"2257 5276" -> "3176 6520" [label="[]", style=solid]; +"2258 5202" -> "2259 5204" [label="[]", style=dashed]; +"2259 5204" -> "2260 5205" [label="[]", style=dashed]; +"2260 5205" -> "2261 5206" 
[label="[]", style=solid]; +"2261 5206" -> "2262 5207" [label="[-1, -1]", style=dashed]; +"2262 5207" -> "2263 5208" [label="[-1, -1]", style=dashed]; +"2263 5208" -> "2264 5211" [label="[-1]", style=dashed]; +"2263 5208" -> "2270 5219" [label="[-1]", style=dashed]; +"2264 5211" -> "2266 5212" [label="[-1]", style=dashed]; +"2265 5210" -> "2266 5212" [label="[]", style=solid]; +"2266 5212" -> "2267 5221" [label="[]", style=solid]; +"2266 5212" -> "2275 5232" [label="[]", style=solid]; +"2267 5221" -> "2268 5222" [label="[]", style=solid]; +"2268 5222" -> "2272 5225" [label="[]", style=solid]; +"2269 5217" -> "2270 5219" [label="[]", style=solid]; +"2270 5219" -> "2271 5220" [label="[]", style=solid]; +"2270 5219" -> "3244 5230" [label="[]", style=solid]; +"2271 5220" -> "2272 5225" [label="[]", style=solid]; +"2272 5225" -> "2273 5227" [label="[-1, 3]", style=dashed]; +"2273 5227" -> "2274 5228" [label="[-1, 1]", style=dashed]; +"2274 5228" -> "2275 5232" [label="[-1]", style=dashed]; +"2274 5228" -> "3243 5229" [label="[-1]", style=dashed]; +"2275 5232" -> "3176 6520" [label="[]", style=solid]; +"2276 5158" -> "2277 5160" [label="[]", style=dashed]; +"2277 5160" -> "2278 5161" [label="[]", style=dashed]; +"2278 5161" -> "2279 5162" [label="[]", style=solid]; +"2279 5162" -> "2280 5163" [label="[-1, -1]", style=dashed]; +"2280 5163" -> "2281 5164" [label="[-1, -1]", style=dashed]; +"2281 5164" -> "2282 5167" [label="[-1]", style=dashed]; +"2281 5164" -> "2288 5175" [label="[-1]", style=dashed]; +"2282 5167" -> "2284 5168" [label="[-1]", style=dashed]; +"2283 5166" -> "2284 5168" [label="[]", style=solid]; +"2284 5168" -> "2285 5177" [label="[]", style=solid]; +"2284 5168" -> "2293 5188" [label="[]", style=solid]; +"2285 5177" -> "2286 5178" [label="[]", style=solid]; +"2286 5178" -> "2290 5181" [label="[]", style=solid]; +"2287 5173" -> "2288 5175" [label="[]", style=solid]; +"2288 5175" -> "2289 5176" [label="[]", style=solid]; +"2288 5175" -> "3246 5186" [label="[]", style=solid]; +"2289 5176" -> "2290 5181" [label="[]", style=solid]; +"2290 5181" -> "2291 5183" [label="[-1, 3]", style=dashed]; +"2291 5183" -> "2292 5184" [label="[-1, 1]", style=dashed]; +"2292 5184" -> "2293 5188" [label="[-1]", style=dashed]; +"2292 5184" -> "3245 5185" [label="[-1]", style=dashed]; +"2293 5188" -> "3176 6520" [label="[]", style=solid]; +"2294 5114" -> "2295 5116" [label="[]", style=dashed]; +"2295 5116" -> "2296 5117" [label="[]", style=dashed]; +"2296 5117" -> "2297 5118" [label="[]", style=solid]; +"2297 5118" -> "2298 5119" [label="[-1, -1]", style=dashed]; +"2298 5119" -> "2299 5120" [label="[-1, -1]", style=dashed]; +"2299 5120" -> "2300 5123" [label="[-1]", style=dashed]; +"2299 5120" -> "2306 5131" [label="[-1]", style=dashed]; +"2300 5123" -> "2302 5124" [label="[-1]", style=dashed]; +"2301 5122" -> "2302 5124" [label="[]", style=solid]; +"2302 5124" -> "2303 5133" [label="[]", style=solid]; +"2302 5124" -> "2311 5144" [label="[]", style=solid]; +"2303 5133" -> "2304 5134" [label="[]", style=solid]; +"2304 5134" -> "2308 5137" [label="[]", style=solid]; +"2305 5129" -> "2306 5131" [label="[]", style=solid]; +"2306 5131" -> "2307 5132" [label="[]", style=solid]; +"2306 5131" -> "3248 5142" [label="[]", style=solid]; +"2307 5132" -> "2308 5137" [label="[]", style=solid]; +"2308 5137" -> "2309 5139" [label="[-1, 3]", style=dashed]; +"2309 5139" -> "2310 5140" [label="[-1, 1]", style=dashed]; +"2310 5140" -> "2311 5144" [label="[-1]", style=dashed]; +"2310 5140" -> "3247 5141" [label="[-1]", 
style=dashed]; +"2311 5144" -> "3176 6520" [label="[]", style=solid]; +"2312 5070" -> "2313 5072" [label="[]", style=dashed]; +"2313 5072" -> "2314 5073" [label="[]", style=dashed]; +"2314 5073" -> "2315 5074" [label="[]", style=solid]; +"2315 5074" -> "2316 5075" [label="[-1, -1]", style=dashed]; +"2316 5075" -> "2317 5076" [label="[-1, -1]", style=dashed]; +"2317 5076" -> "2318 5079" [label="[-1]", style=dashed]; +"2317 5076" -> "2324 5087" [label="[-1]", style=dashed]; +"2318 5079" -> "2320 5080" [label="[-1]", style=dashed]; +"2319 5078" -> "2320 5080" [label="[]", style=solid]; +"2320 5080" -> "2321 5089" [label="[]", style=solid]; +"2320 5080" -> "2329 5100" [label="[]", style=solid]; +"2321 5089" -> "2322 5090" [label="[]", style=solid]; +"2322 5090" -> "2326 5093" [label="[]", style=solid]; +"2323 5085" -> "2324 5087" [label="[]", style=solid]; +"2324 5087" -> "2325 5088" [label="[]", style=solid]; +"2324 5087" -> "3250 5098" [label="[]", style=solid]; +"2325 5088" -> "2326 5093" [label="[]", style=solid]; +"2326 5093" -> "2327 5095" [label="[-1, 3]", style=dashed]; +"2327 5095" -> "2328 5096" [label="[-1, 1]", style=dashed]; +"2328 5096" -> "2329 5100" [label="[-1]", style=dashed]; +"2328 5096" -> "3249 5097" [label="[-1]", style=dashed]; +"2329 5100" -> "3176 6520" [label="[]", style=solid]; +"2330 5026" -> "2331 5028" [label="[]", style=dashed]; +"2331 5028" -> "2332 5029" [label="[]", style=dashed]; +"2332 5029" -> "2333 5030" [label="[]", style=solid]; +"2333 5030" -> "2334 5031" [label="[-1, -1]", style=dashed]; +"2334 5031" -> "2335 5032" [label="[-1, -1]", style=dashed]; +"2335 5032" -> "2336 5035" [label="[-1]", style=dashed]; +"2335 5032" -> "2342 5043" [label="[-1]", style=dashed]; +"2336 5035" -> "2338 5036" [label="[-1]", style=dashed]; +"2337 5034" -> "2338 5036" [label="[]", style=solid]; +"2338 5036" -> "2339 5045" [label="[]", style=solid]; +"2338 5036" -> "2347 5056" [label="[]", style=solid]; +"2339 5045" -> "2340 5046" [label="[]", style=solid]; +"2340 5046" -> "2344 5049" [label="[]", style=solid]; +"2341 5041" -> "2342 5043" [label="[]", style=solid]; +"2342 5043" -> "2343 5044" [label="[]", style=solid]; +"2342 5043" -> "3252 5054" [label="[]", style=solid]; +"2343 5044" -> "2344 5049" [label="[]", style=solid]; +"2344 5049" -> "2345 5051" [label="[-1, 3]", style=dashed]; +"2345 5051" -> "2346 5052" [label="[-1, 1]", style=dashed]; +"2346 5052" -> "2347 5056" [label="[-1]", style=dashed]; +"2346 5052" -> "3251 5053" [label="[-1]", style=dashed]; +"2347 5056" -> "3176 6520" [label="[]", style=solid]; +"2348 4982" -> "2349 4984" [label="[]", style=dashed]; +"2349 4984" -> "2350 4985" [label="[]", style=dashed]; +"2350 4985" -> "2351 4986" [label="[]", style=solid]; +"2351 4986" -> "2352 4987" [label="[-1, -1]", style=dashed]; +"2352 4987" -> "2353 4988" [label="[-1, -1]", style=dashed]; +"2353 4988" -> "2354 4991" [label="[-1]", style=dashed]; +"2353 4988" -> "2360 4999" [label="[-1]", style=dashed]; +"2354 4991" -> "2356 4992" [label="[-1]", style=dashed]; +"2355 4990" -> "2356 4992" [label="[]", style=solid]; +"2356 4992" -> "2357 5001" [label="[]", style=solid]; +"2356 4992" -> "2365 5012" [label="[]", style=solid]; +"2357 5001" -> "2358 5002" [label="[]", style=solid]; +"2358 5002" -> "2362 5005" [label="[]", style=solid]; +"2359 4997" -> "2360 4999" [label="[]", style=solid]; +"2360 4999" -> "2361 5000" [label="[]", style=solid]; +"2360 4999" -> "3254 5010" [label="[]", style=solid]; +"2361 5000" -> "2362 5005" [label="[]", style=solid]; +"2362 5005" -> 
"2363 5007" [label="[-1, 3]", style=dashed]; +"2363 5007" -> "2364 5008" [label="[-1, 1]", style=dashed]; +"2364 5008" -> "2365 5012" [label="[-1]", style=dashed]; +"2364 5008" -> "3253 5009" [label="[-1]", style=dashed]; +"2365 5012" -> "3176 6520" [label="[]", style=solid]; +"2366 4938" -> "2367 4940" [label="[]", style=dashed]; +"2367 4940" -> "2368 4941" [label="[]", style=dashed]; +"2368 4941" -> "2369 4942" [label="[]", style=solid]; +"2369 4942" -> "2370 4943" [label="[-1, -1]", style=dashed]; +"2370 4943" -> "2371 4944" [label="[-1, -1]", style=dashed]; +"2371 4944" -> "2372 4947" [label="[-1]", style=dashed]; +"2371 4944" -> "2378 4955" [label="[-1]", style=dashed]; +"2372 4947" -> "2374 4948" [label="[-1]", style=dashed]; +"2373 4946" -> "2374 4948" [label="[]", style=solid]; +"2374 4948" -> "2375 4957" [label="[]", style=solid]; +"2374 4948" -> "2383 4968" [label="[]", style=solid]; +"2375 4957" -> "2376 4958" [label="[]", style=solid]; +"2376 4958" -> "2380 4961" [label="[]", style=solid]; +"2377 4953" -> "2378 4955" [label="[]", style=solid]; +"2378 4955" -> "2379 4956" [label="[]", style=solid]; +"2378 4955" -> "3256 4966" [label="[]", style=solid]; +"2379 4956" -> "2380 4961" [label="[]", style=solid]; +"2380 4961" -> "2381 4963" [label="[-1, 3]", style=dashed]; +"2381 4963" -> "2382 4964" [label="[-1, 1]", style=dashed]; +"2382 4964" -> "2383 4968" [label="[-1]", style=dashed]; +"2382 4964" -> "3255 4965" [label="[-1]", style=dashed]; +"2383 4968" -> "3176 6520" [label="[]", style=solid]; +"2384 4894" -> "2385 4896" [label="[]", style=dashed]; +"2385 4896" -> "2386 4897" [label="[]", style=dashed]; +"2386 4897" -> "2387 4898" [label="[]", style=solid]; +"2387 4898" -> "2388 4899" [label="[-1, -1]", style=dashed]; +"2388 4899" -> "2389 4900" [label="[-1, -1]", style=dashed]; +"2389 4900" -> "2390 4903" [label="[-1]", style=dashed]; +"2389 4900" -> "2396 4911" [label="[-1]", style=dashed]; +"2390 4903" -> "2392 4904" [label="[-1]", style=dashed]; +"2391 4902" -> "2392 4904" [label="[]", style=solid]; +"2392 4904" -> "2393 4913" [label="[]", style=solid]; +"2392 4904" -> "2401 4924" [label="[]", style=solid]; +"2393 4913" -> "2394 4914" [label="[]", style=solid]; +"2394 4914" -> "2398 4917" [label="[]", style=solid]; +"2395 4909" -> "2396 4911" [label="[]", style=solid]; +"2396 4911" -> "2397 4912" [label="[]", style=solid]; +"2396 4911" -> "3258 4922" [label="[]", style=solid]; +"2397 4912" -> "2398 4917" [label="[]", style=solid]; +"2398 4917" -> "2399 4919" [label="[-1, 3]", style=dashed]; +"2399 4919" -> "2400 4920" [label="[-1, 1]", style=dashed]; +"2400 4920" -> "2401 4924" [label="[-1]", style=dashed]; +"2400 4920" -> "3257 4921" [label="[-1]", style=dashed]; +"2401 4924" -> "3176 6520" [label="[]", style=solid]; +"2402 4850" -> "2403 4852" [label="[]", style=dashed]; +"2403 4852" -> "2404 4853" [label="[]", style=dashed]; +"2404 4853" -> "2405 4854" [label="[]", style=solid]; +"2405 4854" -> "2406 4855" [label="[-1, -1]", style=dashed]; +"2406 4855" -> "2407 4856" [label="[-1, -1]", style=dashed]; +"2407 4856" -> "2408 4859" [label="[-1]", style=dashed]; +"2407 4856" -> "2414 4867" [label="[-1]", style=dashed]; +"2408 4859" -> "2410 4860" [label="[-1]", style=dashed]; +"2409 4858" -> "2410 4860" [label="[]", style=solid]; +"2410 4860" -> "2411 4869" [label="[]", style=solid]; +"2410 4860" -> "2419 4880" [label="[]", style=solid]; +"2411 4869" -> "2412 4870" [label="[]", style=solid]; +"2412 4870" -> "2416 4873" [label="[]", style=solid]; +"2413 4865" -> "2414 4867" 
[label="[]", style=solid]; +"2414 4867" -> "2415 4868" [label="[]", style=solid]; +"2414 4867" -> "3260 4878" [label="[]", style=solid]; +"2415 4868" -> "2416 4873" [label="[]", style=solid]; +"2416 4873" -> "2417 4875" [label="[-1, 3]", style=dashed]; +"2417 4875" -> "2418 4876" [label="[-1, 1]", style=dashed]; +"2418 4876" -> "2419 4880" [label="[-1]", style=dashed]; +"2418 4876" -> "3259 4877" [label="[-1]", style=dashed]; +"2419 4880" -> "3176 6520" [label="[]", style=solid]; +"2420 4806" -> "2421 4808" [label="[]", style=dashed]; +"2421 4808" -> "2422 4809" [label="[]", style=dashed]; +"2422 4809" -> "2423 4810" [label="[]", style=solid]; +"2423 4810" -> "2424 4811" [label="[-1, -1]", style=dashed]; +"2424 4811" -> "2425 4812" [label="[-1, -1]", style=dashed]; +"2425 4812" -> "2426 4815" [label="[-1]", style=dashed]; +"2425 4812" -> "2432 4823" [label="[-1]", style=dashed]; +"2426 4815" -> "2428 4816" [label="[-1]", style=dashed]; +"2427 4814" -> "2428 4816" [label="[]", style=solid]; +"2428 4816" -> "2429 4825" [label="[]", style=solid]; +"2428 4816" -> "2437 4836" [label="[]", style=solid]; +"2429 4825" -> "2430 4826" [label="[]", style=solid]; +"2430 4826" -> "2434 4829" [label="[]", style=solid]; +"2431 4821" -> "2432 4823" [label="[]", style=solid]; +"2432 4823" -> "2433 4824" [label="[]", style=solid]; +"2432 4823" -> "3262 4834" [label="[]", style=solid]; +"2433 4824" -> "2434 4829" [label="[]", style=solid]; +"2434 4829" -> "2435 4831" [label="[-1, 3]", style=dashed]; +"2435 4831" -> "2436 4832" [label="[-1, 1]", style=dashed]; +"2436 4832" -> "2437 4836" [label="[-1]", style=dashed]; +"2436 4832" -> "3261 4833" [label="[-1]", style=dashed]; +"2437 4836" -> "3176 6520" [label="[]", style=solid]; +"2438 4762" -> "2439 4764" [label="[]", style=dashed]; +"2439 4764" -> "2440 4765" [label="[]", style=dashed]; +"2440 4765" -> "2441 4766" [label="[]", style=solid]; +"2441 4766" -> "2442 4767" [label="[-1, -1]", style=dashed]; +"2442 4767" -> "2443 4768" [label="[-1, -1]", style=dashed]; +"2443 4768" -> "2444 4771" [label="[-1]", style=dashed]; +"2443 4768" -> "2450 4779" [label="[-1]", style=dashed]; +"2444 4771" -> "2446 4772" [label="[-1]", style=dashed]; +"2445 4770" -> "2446 4772" [label="[]", style=solid]; +"2446 4772" -> "2447 4781" [label="[]", style=solid]; +"2446 4772" -> "2455 4792" [label="[]", style=solid]; +"2447 4781" -> "2448 4782" [label="[]", style=solid]; +"2448 4782" -> "2452 4785" [label="[]", style=solid]; +"2449 4777" -> "2450 4779" [label="[]", style=solid]; +"2450 4779" -> "2451 4780" [label="[]", style=solid]; +"2450 4779" -> "3264 4790" [label="[]", style=solid]; +"2451 4780" -> "2452 4785" [label="[]", style=solid]; +"2452 4785" -> "2453 4787" [label="[-1, 3]", style=dashed]; +"2453 4787" -> "2454 4788" [label="[-1, 1]", style=dashed]; +"2454 4788" -> "2455 4792" [label="[-1]", style=dashed]; +"2454 4788" -> "3263 4789" [label="[-1]", style=dashed]; +"2455 4792" -> "3176 6520" [label="[]", style=solid]; +"2456 4718" -> "2457 4720" [label="[]", style=dashed]; +"2457 4720" -> "2458 4721" [label="[]", style=dashed]; +"2458 4721" -> "2459 4722" [label="[]", style=solid]; +"2459 4722" -> "2460 4723" [label="[-1, -1]", style=dashed]; +"2460 4723" -> "2461 4724" [label="[-1, -1]", style=dashed]; +"2461 4724" -> "2462 4727" [label="[-1]", style=dashed]; +"2461 4724" -> "2468 4735" [label="[-1]", style=dashed]; +"2462 4727" -> "2464 4728" [label="[-1]", style=dashed]; +"2463 4726" -> "2464 4728" [label="[]", style=solid]; +"2464 4728" -> "2465 4737" [label="[]", 
style=solid]; +"2464 4728" -> "2473 4748" [label="[]", style=solid]; +"2465 4737" -> "2466 4738" [label="[]", style=solid]; +"2466 4738" -> "2470 4741" [label="[]", style=solid]; +"2467 4733" -> "2468 4735" [label="[]", style=solid]; +"2468 4735" -> "2469 4736" [label="[]", style=solid]; +"2468 4735" -> "3266 4746" [label="[]", style=solid]; +"2469 4736" -> "2470 4741" [label="[]", style=solid]; +"2470 4741" -> "2471 4743" [label="[-1, 3]", style=dashed]; +"2471 4743" -> "2472 4744" [label="[-1, 1]", style=dashed]; +"2472 4744" -> "2473 4748" [label="[-1]", style=dashed]; +"2472 4744" -> "3265 4745" [label="[-1]", style=dashed]; +"2473 4748" -> "3176 6520" [label="[]", style=solid]; +"2474 4674" -> "2475 4676" [label="[]", style=dashed]; +"2475 4676" -> "2476 4677" [label="[]", style=dashed]; +"2476 4677" -> "2477 4678" [label="[]", style=solid]; +"2477 4678" -> "2478 4679" [label="[-1, -1]", style=dashed]; +"2478 4679" -> "2479 4680" [label="[-1, -1]", style=dashed]; +"2479 4680" -> "2480 4683" [label="[-1]", style=dashed]; +"2479 4680" -> "2486 4691" [label="[-1]", style=dashed]; +"2480 4683" -> "2482 4684" [label="[-1]", style=dashed]; +"2481 4682" -> "2482 4684" [label="[]", style=solid]; +"2482 4684" -> "2483 4693" [label="[]", style=solid]; +"2482 4684" -> "2491 4704" [label="[]", style=solid]; +"2483 4693" -> "2484 4694" [label="[]", style=solid]; +"2484 4694" -> "2488 4697" [label="[]", style=solid]; +"2485 4689" -> "2486 4691" [label="[]", style=solid]; +"2486 4691" -> "2487 4692" [label="[]", style=solid]; +"2486 4691" -> "3268 4702" [label="[]", style=solid]; +"2487 4692" -> "2488 4697" [label="[]", style=solid]; +"2488 4697" -> "2489 4699" [label="[-1, 3]", style=dashed]; +"2489 4699" -> "2490 4700" [label="[-1, 1]", style=dashed]; +"2490 4700" -> "2491 4704" [label="[-1]", style=dashed]; +"2490 4700" -> "3267 4701" [label="[-1]", style=dashed]; +"2491 4704" -> "3176 6520" [label="[]", style=solid]; +"2492 4630" -> "2493 4632" [label="[]", style=dashed]; +"2493 4632" -> "2494 4633" [label="[]", style=dashed]; +"2494 4633" -> "2495 4634" [label="[]", style=solid]; +"2495 4634" -> "2496 4635" [label="[-1, -1]", style=dashed]; +"2496 4635" -> "2497 4636" [label="[-1, -1]", style=dashed]; +"2497 4636" -> "2498 4639" [label="[-1]", style=dashed]; +"2497 4636" -> "2504 4647" [label="[-1]", style=dashed]; +"2498 4639" -> "2500 4640" [label="[-1]", style=dashed]; +"2499 4638" -> "2500 4640" [label="[]", style=solid]; +"2500 4640" -> "2501 4649" [label="[]", style=solid]; +"2500 4640" -> "2509 4660" [label="[]", style=solid]; +"2501 4649" -> "2502 4650" [label="[]", style=solid]; +"2502 4650" -> "2506 4653" [label="[]", style=solid]; +"2503 4645" -> "2504 4647" [label="[]", style=solid]; +"2504 4647" -> "2505 4648" [label="[]", style=solid]; +"2504 4647" -> "3270 4658" [label="[]", style=solid]; +"2505 4648" -> "2506 4653" [label="[]", style=solid]; +"2506 4653" -> "2507 4655" [label="[-1, 3]", style=dashed]; +"2507 4655" -> "2508 4656" [label="[-1, 1]", style=dashed]; +"2508 4656" -> "2509 4660" [label="[-1]", style=dashed]; +"2508 4656" -> "3269 4657" [label="[-1]", style=dashed]; +"2509 4660" -> "3176 6520" [label="[]", style=solid]; +"2510 4586" -> "2511 4588" [label="[]", style=dashed]; +"2511 4588" -> "2512 4589" [label="[]", style=dashed]; +"2512 4589" -> "2513 4590" [label="[]", style=solid]; +"2513 4590" -> "2514 4591" [label="[-1, -1]", style=dashed]; +"2514 4591" -> "2515 4592" [label="[-1, -1]", style=dashed]; +"2515 4592" -> "2516 4595" [label="[-1]", style=dashed]; +"2515 
4592" -> "2522 4603" [label="[-1]", style=dashed]; +"2516 4595" -> "2518 4596" [label="[-1]", style=dashed]; +"2517 4594" -> "2518 4596" [label="[]", style=solid]; +"2518 4596" -> "2519 4605" [label="[]", style=solid]; +"2518 4596" -> "2527 4616" [label="[]", style=solid]; +"2519 4605" -> "2520 4606" [label="[]", style=solid]; +"2520 4606" -> "2524 4609" [label="[]", style=solid]; +"2521 4601" -> "2522 4603" [label="[]", style=solid]; +"2522 4603" -> "2523 4604" [label="[]", style=solid]; +"2522 4603" -> "3272 4614" [label="[]", style=solid]; +"2523 4604" -> "2524 4609" [label="[]", style=solid]; +"2524 4609" -> "2525 4611" [label="[-1, 3]", style=dashed]; +"2525 4611" -> "2526 4612" [label="[-1, 1]", style=dashed]; +"2526 4612" -> "2527 4616" [label="[-1]", style=dashed]; +"2526 4612" -> "3271 4613" [label="[-1]", style=dashed]; +"2527 4616" -> "3176 6520" [label="[]", style=solid]; +"2528 4542" -> "2529 4544" [label="[]", style=dashed]; +"2529 4544" -> "2530 4545" [label="[]", style=dashed]; +"2530 4545" -> "2531 4546" [label="[]", style=solid]; +"2531 4546" -> "2532 4547" [label="[-1, -1]", style=dashed]; +"2532 4547" -> "2533 4548" [label="[-1, -1]", style=dashed]; +"2533 4548" -> "2534 4551" [label="[-1]", style=dashed]; +"2533 4548" -> "2540 4559" [label="[-1]", style=dashed]; +"2534 4551" -> "2536 4552" [label="[-1]", style=dashed]; +"2535 4550" -> "2536 4552" [label="[]", style=solid]; +"2536 4552" -> "2537 4561" [label="[]", style=solid]; +"2536 4552" -> "2545 4572" [label="[]", style=solid]; +"2537 4561" -> "2538 4562" [label="[]", style=solid]; +"2538 4562" -> "2542 4565" [label="[]", style=solid]; +"2539 4557" -> "2540 4559" [label="[]", style=solid]; +"2540 4559" -> "2541 4560" [label="[]", style=solid]; +"2540 4559" -> "3274 4570" [label="[]", style=solid]; +"2541 4560" -> "2542 4565" [label="[]", style=solid]; +"2542 4565" -> "2543 4567" [label="[-1, 3]", style=dashed]; +"2543 4567" -> "2544 4568" [label="[-1, 1]", style=dashed]; +"2544 4568" -> "2545 4572" [label="[-1]", style=dashed]; +"2544 4568" -> "3273 4569" [label="[-1]", style=dashed]; +"2545 4572" -> "3176 6520" [label="[]", style=solid]; +"2546 4498" -> "2547 4500" [label="[]", style=dashed]; +"2547 4500" -> "2548 4501" [label="[]", style=dashed]; +"2548 4501" -> "2549 4502" [label="[]", style=solid]; +"2549 4502" -> "2550 4503" [label="[-1, -1]", style=dashed]; +"2550 4503" -> "2551 4504" [label="[-1, -1]", style=dashed]; +"2551 4504" -> "2552 4507" [label="[-1]", style=dashed]; +"2551 4504" -> "2558 4515" [label="[-1]", style=dashed]; +"2552 4507" -> "2554 4508" [label="[-1]", style=dashed]; +"2553 4506" -> "2554 4508" [label="[]", style=solid]; +"2554 4508" -> "2555 4517" [label="[]", style=solid]; +"2554 4508" -> "2563 4528" [label="[]", style=solid]; +"2555 4517" -> "2556 4518" [label="[]", style=solid]; +"2556 4518" -> "2560 4521" [label="[]", style=solid]; +"2557 4513" -> "2558 4515" [label="[]", style=solid]; +"2558 4515" -> "2559 4516" [label="[]", style=solid]; +"2558 4515" -> "3276 4526" [label="[]", style=solid]; +"2559 4516" -> "2560 4521" [label="[]", style=solid]; +"2560 4521" -> "2561 4523" [label="[-1, 3]", style=dashed]; +"2561 4523" -> "2562 4524" [label="[-1, 1]", style=dashed]; +"2562 4524" -> "2563 4528" [label="[-1]", style=dashed]; +"2562 4524" -> "3275 4525" [label="[-1]", style=dashed]; +"2563 4528" -> "3176 6520" [label="[]", style=solid]; +"2564 4454" -> "2565 4456" [label="[]", style=dashed]; +"2565 4456" -> "2566 4457" [label="[]", style=dashed]; +"2566 4457" -> "2567 4458" 
[label="[]", style=solid]; +"2567 4458" -> "2568 4459" [label="[-1, -1]", style=dashed]; +"2568 4459" -> "2569 4460" [label="[-1, -1]", style=dashed]; +"2569 4460" -> "2570 4463" [label="[-1]", style=dashed]; +"2569 4460" -> "2576 4471" [label="[-1]", style=dashed]; +"2570 4463" -> "2572 4464" [label="[-1]", style=dashed]; +"2571 4462" -> "2572 4464" [label="[]", style=solid]; +"2572 4464" -> "2573 4473" [label="[]", style=solid]; +"2572 4464" -> "2581 4484" [label="[]", style=solid]; +"2573 4473" -> "2574 4474" [label="[]", style=solid]; +"2574 4474" -> "2578 4477" [label="[]", style=solid]; +"2575 4469" -> "2576 4471" [label="[]", style=solid]; +"2576 4471" -> "2577 4472" [label="[]", style=solid]; +"2576 4471" -> "3278 4482" [label="[]", style=solid]; +"2577 4472" -> "2578 4477" [label="[]", style=solid]; +"2578 4477" -> "2579 4479" [label="[-1, 3]", style=dashed]; +"2579 4479" -> "2580 4480" [label="[-1, 1]", style=dashed]; +"2580 4480" -> "2581 4484" [label="[-1]", style=dashed]; +"2580 4480" -> "3277 4481" [label="[-1]", style=dashed]; +"2581 4484" -> "3176 6520" [label="[]", style=solid]; +"2582 4410" -> "2583 4412" [label="[]", style=dashed]; +"2583 4412" -> "2584 4413" [label="[]", style=dashed]; +"2584 4413" -> "2585 4414" [label="[]", style=solid]; +"2585 4414" -> "2586 4415" [label="[-1, -1]", style=dashed]; +"2586 4415" -> "2587 4416" [label="[-1, -1]", style=dashed]; +"2587 4416" -> "2588 4419" [label="[-1]", style=dashed]; +"2587 4416" -> "2594 4427" [label="[-1]", style=dashed]; +"2588 4419" -> "2590 4420" [label="[-1]", style=dashed]; +"2589 4418" -> "2590 4420" [label="[]", style=solid]; +"2590 4420" -> "2591 4429" [label="[]", style=solid]; +"2590 4420" -> "2599 4440" [label="[]", style=solid]; +"2591 4429" -> "2592 4430" [label="[]", style=solid]; +"2592 4430" -> "2596 4433" [label="[]", style=solid]; +"2593 4425" -> "2594 4427" [label="[]", style=solid]; +"2594 4427" -> "2595 4428" [label="[]", style=solid]; +"2594 4427" -> "3280 4438" [label="[]", style=solid]; +"2595 4428" -> "2596 4433" [label="[]", style=solid]; +"2596 4433" -> "2597 4435" [label="[-1, 3]", style=dashed]; +"2597 4435" -> "2598 4436" [label="[-1, 1]", style=dashed]; +"2598 4436" -> "2599 4440" [label="[-1]", style=dashed]; +"2598 4436" -> "3279 4437" [label="[-1]", style=dashed]; +"2599 4440" -> "3176 6520" [label="[]", style=solid]; +"2600 4366" -> "2601 4368" [label="[]", style=dashed]; +"2601 4368" -> "2602 4369" [label="[]", style=dashed]; +"2602 4369" -> "2603 4370" [label="[]", style=solid]; +"2603 4370" -> "2604 4371" [label="[-1, -1]", style=dashed]; +"2604 4371" -> "2605 4372" [label="[-1, -1]", style=dashed]; +"2605 4372" -> "2606 4375" [label="[-1]", style=dashed]; +"2605 4372" -> "2612 4383" [label="[-1]", style=dashed]; +"2606 4375" -> "2608 4376" [label="[-1]", style=dashed]; +"2607 4374" -> "2608 4376" [label="[]", style=solid]; +"2608 4376" -> "2609 4385" [label="[]", style=solid]; +"2608 4376" -> "2617 4396" [label="[]", style=solid]; +"2609 4385" -> "2610 4386" [label="[]", style=solid]; +"2610 4386" -> "2614 4389" [label="[]", style=solid]; +"2611 4381" -> "2612 4383" [label="[]", style=solid]; +"2612 4383" -> "2613 4384" [label="[]", style=solid]; +"2612 4383" -> "3282 4394" [label="[]", style=solid]; +"2613 4384" -> "2614 4389" [label="[]", style=solid]; +"2614 4389" -> "2615 4391" [label="[-1, 3]", style=dashed]; +"2615 4391" -> "2616 4392" [label="[-1, 1]", style=dashed]; +"2616 4392" -> "2617 4396" [label="[-1]", style=dashed]; +"2616 4392" -> "3281 4393" [label="[-1]", 
style=dashed]; +"2617 4396" -> "3176 6520" [label="[]", style=solid]; +"2618 4322" -> "2619 4324" [label="[]", style=dashed]; +"2619 4324" -> "2620 4325" [label="[]", style=dashed]; +"2620 4325" -> "2621 4326" [label="[]", style=solid]; +"2621 4326" -> "2622 4327" [label="[-1, -1]", style=dashed]; +"2622 4327" -> "2623 4328" [label="[-1, -1]", style=dashed]; +"2623 4328" -> "2624 4331" [label="[-1]", style=dashed]; +"2623 4328" -> "2630 4339" [label="[-1]", style=dashed]; +"2624 4331" -> "2626 4332" [label="[-1]", style=dashed]; +"2625 4330" -> "2626 4332" [label="[]", style=solid]; +"2626 4332" -> "2627 4341" [label="[]", style=solid]; +"2626 4332" -> "2635 4352" [label="[]", style=solid]; +"2627 4341" -> "2628 4342" [label="[]", style=solid]; +"2628 4342" -> "2632 4345" [label="[]", style=solid]; +"2629 4337" -> "2630 4339" [label="[]", style=solid]; +"2630 4339" -> "2631 4340" [label="[]", style=solid]; +"2630 4339" -> "3284 4350" [label="[]", style=solid]; +"2631 4340" -> "2632 4345" [label="[]", style=solid]; +"2632 4345" -> "2633 4347" [label="[-1, 3]", style=dashed]; +"2633 4347" -> "2634 4348" [label="[-1, 1]", style=dashed]; +"2634 4348" -> "2635 4352" [label="[-1]", style=dashed]; +"2634 4348" -> "3283 4349" [label="[-1]", style=dashed]; +"2635 4352" -> "3176 6520" [label="[]", style=solid]; +"2636 4278" -> "2637 4280" [label="[]", style=dashed]; +"2637 4280" -> "2638 4281" [label="[]", style=dashed]; +"2638 4281" -> "2639 4282" [label="[]", style=solid]; +"2639 4282" -> "2640 4283" [label="[-1, -1]", style=dashed]; +"2640 4283" -> "2641 4284" [label="[-1, -1]", style=dashed]; +"2641 4284" -> "2642 4287" [label="[-1]", style=dashed]; +"2641 4284" -> "2648 4295" [label="[-1]", style=dashed]; +"2642 4287" -> "2644 4288" [label="[-1]", style=dashed]; +"2643 4286" -> "2644 4288" [label="[]", style=solid]; +"2644 4288" -> "2645 4297" [label="[]", style=solid]; +"2644 4288" -> "2653 4308" [label="[]", style=solid]; +"2645 4297" -> "2646 4298" [label="[]", style=solid]; +"2646 4298" -> "2650 4301" [label="[]", style=solid]; +"2647 4293" -> "2648 4295" [label="[]", style=solid]; +"2648 4295" -> "2649 4296" [label="[]", style=solid]; +"2648 4295" -> "3286 4306" [label="[]", style=solid]; +"2649 4296" -> "2650 4301" [label="[]", style=solid]; +"2650 4301" -> "2651 4303" [label="[-1, 3]", style=dashed]; +"2651 4303" -> "2652 4304" [label="[-1, 1]", style=dashed]; +"2652 4304" -> "2653 4308" [label="[-1]", style=dashed]; +"2652 4304" -> "3285 4305" [label="[-1]", style=dashed]; +"2653 4308" -> "3176 6520" [label="[]", style=solid]; +"2654 4234" -> "2655 4236" [label="[]", style=dashed]; +"2655 4236" -> "2656 4237" [label="[]", style=dashed]; +"2656 4237" -> "2657 4238" [label="[]", style=solid]; +"2657 4238" -> "2658 4239" [label="[-1, -1]", style=dashed]; +"2658 4239" -> "2659 4240" [label="[-1, -1]", style=dashed]; +"2659 4240" -> "2660 4243" [label="[-1]", style=dashed]; +"2659 4240" -> "2666 4251" [label="[-1]", style=dashed]; +"2660 4243" -> "2662 4244" [label="[-1]", style=dashed]; +"2661 4242" -> "2662 4244" [label="[]", style=solid]; +"2662 4244" -> "2663 4253" [label="[]", style=solid]; +"2662 4244" -> "2671 4264" [label="[]", style=solid]; +"2663 4253" -> "2664 4254" [label="[]", style=solid]; +"2664 4254" -> "2668 4257" [label="[]", style=solid]; +"2665 4249" -> "2666 4251" [label="[]", style=solid]; +"2666 4251" -> "2667 4252" [label="[]", style=solid]; +"2666 4251" -> "3288 4262" [label="[]", style=solid]; +"2667 4252" -> "2668 4257" [label="[]", style=solid]; +"2668 4257" -> 
"2669 4259" [label="[-1, 3]", style=dashed]; +"2669 4259" -> "2670 4260" [label="[-1, 1]", style=dashed]; +"2670 4260" -> "2671 4264" [label="[-1]", style=dashed]; +"2670 4260" -> "3287 4261" [label="[-1]", style=dashed]; +"2671 4264" -> "3176 6520" [label="[]", style=solid]; +"2672 4190" -> "2673 4192" [label="[]", style=dashed]; +"2673 4192" -> "2674 4193" [label="[]", style=dashed]; +"2674 4193" -> "2675 4194" [label="[]", style=solid]; +"2675 4194" -> "2676 4195" [label="[-1, -1]", style=dashed]; +"2676 4195" -> "2677 4196" [label="[-1, -1]", style=dashed]; +"2677 4196" -> "2678 4199" [label="[-1]", style=dashed]; +"2677 4196" -> "2684 4207" [label="[-1]", style=dashed]; +"2678 4199" -> "2680 4200" [label="[-1]", style=dashed]; +"2679 4198" -> "2680 4200" [label="[]", style=solid]; +"2680 4200" -> "2681 4209" [label="[]", style=solid]; +"2680 4200" -> "2689 4220" [label="[]", style=solid]; +"2681 4209" -> "2682 4210" [label="[]", style=solid]; +"2682 4210" -> "2686 4213" [label="[]", style=solid]; +"2683 4205" -> "2684 4207" [label="[]", style=solid]; +"2684 4207" -> "2685 4208" [label="[]", style=solid]; +"2684 4207" -> "3290 4218" [label="[]", style=solid]; +"2685 4208" -> "2686 4213" [label="[]", style=solid]; +"2686 4213" -> "2687 4215" [label="[-1, 3]", style=dashed]; +"2687 4215" -> "2688 4216" [label="[-1, 1]", style=dashed]; +"2688 4216" -> "2689 4220" [label="[-1]", style=dashed]; +"2688 4216" -> "3289 4217" [label="[-1]", style=dashed]; +"2689 4220" -> "3176 6520" [label="[]", style=solid]; +"2690 4146" -> "2691 4148" [label="[]", style=dashed]; +"2691 4148" -> "2692 4149" [label="[]", style=dashed]; +"2692 4149" -> "2693 4150" [label="[]", style=solid]; +"2693 4150" -> "2694 4151" [label="[-1, -1]", style=dashed]; +"2694 4151" -> "2695 4152" [label="[-1, -1]", style=dashed]; +"2695 4152" -> "2696 4155" [label="[-1]", style=dashed]; +"2695 4152" -> "2702 4163" [label="[-1]", style=dashed]; +"2696 4155" -> "2698 4156" [label="[-1]", style=dashed]; +"2697 4154" -> "2698 4156" [label="[]", style=solid]; +"2698 4156" -> "2699 4165" [label="[]", style=solid]; +"2698 4156" -> "2707 4176" [label="[]", style=solid]; +"2699 4165" -> "2700 4166" [label="[]", style=solid]; +"2700 4166" -> "2704 4169" [label="[]", style=solid]; +"2701 4161" -> "2702 4163" [label="[]", style=solid]; +"2702 4163" -> "2703 4164" [label="[]", style=solid]; +"2702 4163" -> "3292 4174" [label="[]", style=solid]; +"2703 4164" -> "2704 4169" [label="[]", style=solid]; +"2704 4169" -> "2705 4171" [label="[-1, 3]", style=dashed]; +"2705 4171" -> "2706 4172" [label="[-1, 1]", style=dashed]; +"2706 4172" -> "2707 4176" [label="[-1]", style=dashed]; +"2706 4172" -> "3291 4173" [label="[-1]", style=dashed]; +"2707 4176" -> "3176 6520" [label="[]", style=solid]; +"2708 4102" -> "2709 4104" [label="[]", style=dashed]; +"2709 4104" -> "2710 4105" [label="[]", style=dashed]; +"2710 4105" -> "2711 4106" [label="[]", style=solid]; +"2711 4106" -> "2712 4107" [label="[-1, -1]", style=dashed]; +"2712 4107" -> "2713 4108" [label="[-1, -1]", style=dashed]; +"2713 4108" -> "2714 4111" [label="[-1]", style=dashed]; +"2713 4108" -> "2720 4119" [label="[-1]", style=dashed]; +"2714 4111" -> "2716 4112" [label="[-1]", style=dashed]; +"2715 4110" -> "2716 4112" [label="[]", style=solid]; +"2716 4112" -> "2717 4121" [label="[]", style=solid]; +"2716 4112" -> "2725 4132" [label="[]", style=solid]; +"2717 4121" -> "2718 4122" [label="[]", style=solid]; +"2718 4122" -> "2722 4125" [label="[]", style=solid]; +"2719 4117" -> "2720 4119" 
[label="[]", style=solid]; +"2720 4119" -> "2721 4120" [label="[]", style=solid]; +"2720 4119" -> "3294 4130" [label="[]", style=solid]; +"2721 4120" -> "2722 4125" [label="[]", style=solid]; +"2722 4125" -> "2723 4127" [label="[-1, 3]", style=dashed]; +"2723 4127" -> "2724 4128" [label="[-1, 1]", style=dashed]; +"2724 4128" -> "2725 4132" [label="[-1]", style=dashed]; +"2724 4128" -> "3293 4129" [label="[-1]", style=dashed]; +"2725 4132" -> "3176 6520" [label="[]", style=solid]; +"2726 4058" -> "2727 4060" [label="[]", style=dashed]; +"2727 4060" -> "2728 4061" [label="[]", style=dashed]; +"2728 4061" -> "2729 4062" [label="[]", style=solid]; +"2729 4062" -> "2730 4063" [label="[-1, -1]", style=dashed]; +"2730 4063" -> "2731 4064" [label="[-1, -1]", style=dashed]; +"2731 4064" -> "2732 4067" [label="[-1]", style=dashed]; +"2731 4064" -> "2738 4075" [label="[-1]", style=dashed]; +"2732 4067" -> "2734 4068" [label="[-1]", style=dashed]; +"2733 4066" -> "2734 4068" [label="[]", style=solid]; +"2734 4068" -> "2735 4077" [label="[]", style=solid]; +"2734 4068" -> "2743 4088" [label="[]", style=solid]; +"2735 4077" -> "2736 4078" [label="[]", style=solid]; +"2736 4078" -> "2740 4081" [label="[]", style=solid]; +"2737 4073" -> "2738 4075" [label="[]", style=solid]; +"2738 4075" -> "2739 4076" [label="[]", style=solid]; +"2738 4075" -> "3296 4086" [label="[]", style=solid]; +"2739 4076" -> "2740 4081" [label="[]", style=solid]; +"2740 4081" -> "2741 4083" [label="[-1, 3]", style=dashed]; +"2741 4083" -> "2742 4084" [label="[-1, 1]", style=dashed]; +"2742 4084" -> "2743 4088" [label="[-1]", style=dashed]; +"2742 4084" -> "3295 4085" [label="[-1]", style=dashed]; +"2743 4088" -> "3176 6520" [label="[]", style=solid]; +"2744 4014" -> "2745 4016" [label="[]", style=dashed]; +"2745 4016" -> "2746 4017" [label="[]", style=dashed]; +"2746 4017" -> "2747 4018" [label="[]", style=solid]; +"2747 4018" -> "2748 4019" [label="[-1, -1]", style=dashed]; +"2748 4019" -> "2749 4020" [label="[-1, -1]", style=dashed]; +"2749 4020" -> "2750 4023" [label="[-1]", style=dashed]; +"2749 4020" -> "2756 4031" [label="[-1]", style=dashed]; +"2750 4023" -> "2752 4024" [label="[-1]", style=dashed]; +"2751 4022" -> "2752 4024" [label="[]", style=solid]; +"2752 4024" -> "2753 4033" [label="[]", style=solid]; +"2752 4024" -> "2761 4044" [label="[]", style=solid]; +"2753 4033" -> "2754 4034" [label="[]", style=solid]; +"2754 4034" -> "2758 4037" [label="[]", style=solid]; +"2755 4029" -> "2756 4031" [label="[]", style=solid]; +"2756 4031" -> "2757 4032" [label="[]", style=solid]; +"2756 4031" -> "3298 4042" [label="[]", style=solid]; +"2757 4032" -> "2758 4037" [label="[]", style=solid]; +"2758 4037" -> "2759 4039" [label="[-1, 3]", style=dashed]; +"2759 4039" -> "2760 4040" [label="[-1, 1]", style=dashed]; +"2760 4040" -> "2761 4044" [label="[-1]", style=dashed]; +"2760 4040" -> "3297 4041" [label="[-1]", style=dashed]; +"2761 4044" -> "3176 6520" [label="[]", style=solid]; +"2762 3970" -> "2763 3972" [label="[]", style=dashed]; +"2763 3972" -> "2764 3973" [label="[]", style=dashed]; +"2764 3973" -> "2765 3974" [label="[]", style=solid]; +"2765 3974" -> "2766 3975" [label="[-1, -1]", style=dashed]; +"2766 3975" -> "2767 3976" [label="[-1, -1]", style=dashed]; +"2767 3976" -> "2768 3979" [label="[-1]", style=dashed]; +"2767 3976" -> "2774 3987" [label="[-1]", style=dashed]; +"2768 3979" -> "2770 3980" [label="[-1]", style=dashed]; +"2769 3978" -> "2770 3980" [label="[]", style=solid]; +"2770 3980" -> "2771 3989" [label="[]", 
style=solid]; +"2770 3980" -> "2779 4000" [label="[]", style=solid]; +"2771 3989" -> "2772 3990" [label="[]", style=solid]; +"2772 3990" -> "2776 3993" [label="[]", style=solid]; +"2773 3985" -> "2774 3987" [label="[]", style=solid]; +"2774 3987" -> "2775 3988" [label="[]", style=solid]; +"2774 3987" -> "3300 3998" [label="[]", style=solid]; +"2775 3988" -> "2776 3993" [label="[]", style=solid]; +"2776 3993" -> "2777 3995" [label="[-1, 3]", style=dashed]; +"2777 3995" -> "2778 3996" [label="[-1, 1]", style=dashed]; +"2778 3996" -> "2779 4000" [label="[-1]", style=dashed]; +"2778 3996" -> "3299 3997" [label="[-1]", style=dashed]; +"2779 4000" -> "3176 6520" [label="[]", style=solid]; +"2780 3926" -> "2781 3928" [label="[]", style=dashed]; +"2781 3928" -> "2782 3929" [label="[]", style=dashed]; +"2782 3929" -> "2783 3930" [label="[]", style=solid]; +"2783 3930" -> "2784 3931" [label="[-1, -1]", style=dashed]; +"2784 3931" -> "2785 3932" [label="[-1, -1]", style=dashed]; +"2785 3932" -> "2786 3935" [label="[-1]", style=dashed]; +"2785 3932" -> "2792 3943" [label="[-1]", style=dashed]; +"2786 3935" -> "2788 3936" [label="[-1]", style=dashed]; +"2787 3934" -> "2788 3936" [label="[]", style=solid]; +"2788 3936" -> "2789 3945" [label="[]", style=solid]; +"2788 3936" -> "2797 3956" [label="[]", style=solid]; +"2789 3945" -> "2790 3946" [label="[]", style=solid]; +"2790 3946" -> "2794 3949" [label="[]", style=solid]; +"2791 3941" -> "2792 3943" [label="[]", style=solid]; +"2792 3943" -> "2793 3944" [label="[]", style=solid]; +"2792 3943" -> "3302 3954" [label="[]", style=solid]; +"2793 3944" -> "2794 3949" [label="[]", style=solid]; +"2794 3949" -> "2795 3951" [label="[-1, 3]", style=dashed]; +"2795 3951" -> "2796 3952" [label="[-1, 1]", style=dashed]; +"2796 3952" -> "2797 3956" [label="[-1]", style=dashed]; +"2796 3952" -> "3301 3953" [label="[-1]", style=dashed]; +"2797 3956" -> "3176 6520" [label="[]", style=solid]; +"2798 3882" -> "2799 3884" [label="[]", style=dashed]; +"2799 3884" -> "2800 3885" [label="[]", style=dashed]; +"2800 3885" -> "2801 3886" [label="[]", style=solid]; +"2801 3886" -> "2802 3887" [label="[-1, -1]", style=dashed]; +"2802 3887" -> "2803 3888" [label="[-1, -1]", style=dashed]; +"2803 3888" -> "2804 3891" [label="[-1]", style=dashed]; +"2803 3888" -> "2810 3899" [label="[-1]", style=dashed]; +"2804 3891" -> "2806 3892" [label="[-1]", style=dashed]; +"2805 3890" -> "2806 3892" [label="[]", style=solid]; +"2806 3892" -> "2807 3901" [label="[]", style=solid]; +"2806 3892" -> "2815 3912" [label="[]", style=solid]; +"2807 3901" -> "2808 3902" [label="[]", style=solid]; +"2808 3902" -> "2812 3905" [label="[]", style=solid]; +"2809 3897" -> "2810 3899" [label="[]", style=solid]; +"2810 3899" -> "2811 3900" [label="[]", style=solid]; +"2810 3899" -> "3304 3910" [label="[]", style=solid]; +"2811 3900" -> "2812 3905" [label="[]", style=solid]; +"2812 3905" -> "2813 3907" [label="[-1, 3]", style=dashed]; +"2813 3907" -> "2814 3908" [label="[-1, 1]", style=dashed]; +"2814 3908" -> "2815 3912" [label="[-1]", style=dashed]; +"2814 3908" -> "3303 3909" [label="[-1]", style=dashed]; +"2815 3912" -> "3176 6520" [label="[]", style=solid]; +"2816 3838" -> "2817 3840" [label="[]", style=dashed]; +"2817 3840" -> "2818 3841" [label="[]", style=dashed]; +"2818 3841" -> "2819 3842" [label="[]", style=solid]; +"2819 3842" -> "2820 3843" [label="[-1, -1]", style=dashed]; +"2820 3843" -> "2821 3844" [label="[-1, -1]", style=dashed]; +"2821 3844" -> "2822 3847" [label="[-1]", style=dashed]; +"2821 
3844" -> "2828 3855" [label="[-1]", style=dashed]; +"2822 3847" -> "2824 3848" [label="[-1]", style=dashed]; +"2823 3846" -> "2824 3848" [label="[]", style=solid]; +"2824 3848" -> "2825 3857" [label="[]", style=solid]; +"2824 3848" -> "2833 3868" [label="[]", style=solid]; +"2825 3857" -> "2826 3858" [label="[]", style=solid]; +"2826 3858" -> "2830 3861" [label="[]", style=solid]; +"2827 3853" -> "2828 3855" [label="[]", style=solid]; +"2828 3855" -> "2829 3856" [label="[]", style=solid]; +"2828 3855" -> "3306 3866" [label="[]", style=solid]; +"2829 3856" -> "2830 3861" [label="[]", style=solid]; +"2830 3861" -> "2831 3863" [label="[-1, 3]", style=dashed]; +"2831 3863" -> "2832 3864" [label="[-1, 1]", style=dashed]; +"2832 3864" -> "2833 3868" [label="[-1]", style=dashed]; +"2832 3864" -> "3305 3865" [label="[-1]", style=dashed]; +"2833 3868" -> "3176 6520" [label="[]", style=solid]; +"2834 3794" -> "2835 3796" [label="[]", style=dashed]; +"2835 3796" -> "2836 3797" [label="[]", style=dashed]; +"2836 3797" -> "2837 3798" [label="[]", style=solid]; +"2837 3798" -> "2838 3799" [label="[-1, -1]", style=dashed]; +"2838 3799" -> "2839 3800" [label="[-1, -1]", style=dashed]; +"2839 3800" -> "2840 3803" [label="[-1]", style=dashed]; +"2839 3800" -> "2846 3811" [label="[-1]", style=dashed]; +"2840 3803" -> "2842 3804" [label="[-1]", style=dashed]; +"2841 3802" -> "2842 3804" [label="[]", style=solid]; +"2842 3804" -> "2843 3813" [label="[]", style=solid]; +"2842 3804" -> "2851 3824" [label="[]", style=solid]; +"2843 3813" -> "2844 3814" [label="[]", style=solid]; +"2844 3814" -> "2848 3817" [label="[]", style=solid]; +"2845 3809" -> "2846 3811" [label="[]", style=solid]; +"2846 3811" -> "2847 3812" [label="[]", style=solid]; +"2846 3811" -> "3308 3822" [label="[]", style=solid]; +"2847 3812" -> "2848 3817" [label="[]", style=solid]; +"2848 3817" -> "2849 3819" [label="[-1, 3]", style=dashed]; +"2849 3819" -> "2850 3820" [label="[-1, 1]", style=dashed]; +"2850 3820" -> "2851 3824" [label="[-1]", style=dashed]; +"2850 3820" -> "3307 3821" [label="[-1]", style=dashed]; +"2851 3824" -> "3176 6520" [label="[]", style=solid]; +"2852 3750" -> "2853 3752" [label="[]", style=dashed]; +"2853 3752" -> "2854 3753" [label="[]", style=dashed]; +"2854 3753" -> "2855 3754" [label="[]", style=solid]; +"2855 3754" -> "2856 3755" [label="[-1, -1]", style=dashed]; +"2856 3755" -> "2857 3756" [label="[-1, -1]", style=dashed]; +"2857 3756" -> "2858 3759" [label="[-1]", style=dashed]; +"2857 3756" -> "2864 3767" [label="[-1]", style=dashed]; +"2858 3759" -> "2860 3760" [label="[-1]", style=dashed]; +"2859 3758" -> "2860 3760" [label="[]", style=solid]; +"2860 3760" -> "2861 3769" [label="[]", style=solid]; +"2860 3760" -> "2869 3780" [label="[]", style=solid]; +"2861 3769" -> "2862 3770" [label="[]", style=solid]; +"2862 3770" -> "2866 3773" [label="[]", style=solid]; +"2863 3765" -> "2864 3767" [label="[]", style=solid]; +"2864 3767" -> "2865 3768" [label="[]", style=solid]; +"2864 3767" -> "3310 3778" [label="[]", style=solid]; +"2865 3768" -> "2866 3773" [label="[]", style=solid]; +"2866 3773" -> "2867 3775" [label="[-1, 3]", style=dashed]; +"2867 3775" -> "2868 3776" [label="[-1, 1]", style=dashed]; +"2868 3776" -> "2869 3780" [label="[-1]", style=dashed]; +"2868 3776" -> "3309 3777" [label="[-1]", style=dashed]; +"2869 3780" -> "3176 6520" [label="[]", style=solid]; +"2870 3706" -> "2871 3708" [label="[]", style=dashed]; +"2871 3708" -> "2872 3709" [label="[]", style=dashed]; +"2872 3709" -> "2873 3710" 
[label="[]", style=solid]; +"2873 3710" -> "2874 3711" [label="[-1, -1]", style=dashed]; +"2874 3711" -> "2875 3712" [label="[-1, -1]", style=dashed]; +"2875 3712" -> "2876 3715" [label="[-1]", style=dashed]; +"2875 3712" -> "2882 3723" [label="[-1]", style=dashed]; +"2876 3715" -> "2878 3716" [label="[-1]", style=dashed]; +"2877 3714" -> "2878 3716" [label="[]", style=solid]; +"2878 3716" -> "2879 3725" [label="[]", style=solid]; +"2878 3716" -> "2887 3736" [label="[]", style=solid]; +"2879 3725" -> "2880 3726" [label="[]", style=solid]; +"2880 3726" -> "2884 3729" [label="[]", style=solid]; +"2881 3721" -> "2882 3723" [label="[]", style=solid]; +"2882 3723" -> "2883 3724" [label="[]", style=solid]; +"2882 3723" -> "3312 3734" [label="[]", style=solid]; +"2883 3724" -> "2884 3729" [label="[]", style=solid]; +"2884 3729" -> "2885 3731" [label="[-1, 3]", style=dashed]; +"2885 3731" -> "2886 3732" [label="[-1, 1]", style=dashed]; +"2886 3732" -> "2887 3736" [label="[-1]", style=dashed]; +"2886 3732" -> "3311 3733" [label="[-1]", style=dashed]; +"2887 3736" -> "3176 6520" [label="[]", style=solid]; +"2888 3662" -> "2889 3664" [label="[]", style=dashed]; +"2889 3664" -> "2890 3665" [label="[]", style=dashed]; +"2890 3665" -> "2891 3666" [label="[]", style=solid]; +"2891 3666" -> "2892 3667" [label="[-1, -1]", style=dashed]; +"2892 3667" -> "2893 3668" [label="[-1, -1]", style=dashed]; +"2893 3668" -> "2894 3671" [label="[-1]", style=dashed]; +"2893 3668" -> "2900 3679" [label="[-1]", style=dashed]; +"2894 3671" -> "2896 3672" [label="[-1]", style=dashed]; +"2895 3670" -> "2896 3672" [label="[]", style=solid]; +"2896 3672" -> "2897 3681" [label="[]", style=solid]; +"2896 3672" -> "2905 3692" [label="[]", style=solid]; +"2897 3681" -> "2898 3682" [label="[]", style=solid]; +"2898 3682" -> "2902 3685" [label="[]", style=solid]; +"2899 3677" -> "2900 3679" [label="[]", style=solid]; +"2900 3679" -> "2901 3680" [label="[]", style=solid]; +"2900 3679" -> "3314 3690" [label="[]", style=solid]; +"2901 3680" -> "2902 3685" [label="[]", style=solid]; +"2902 3685" -> "2903 3687" [label="[-1, 3]", style=dashed]; +"2903 3687" -> "2904 3688" [label="[-1, 1]", style=dashed]; +"2904 3688" -> "2905 3692" [label="[-1]", style=dashed]; +"2904 3688" -> "3313 3689" [label="[-1]", style=dashed]; +"2905 3692" -> "3176 6520" [label="[]", style=solid]; +"2906 3618" -> "2907 3620" [label="[]", style=dashed]; +"2907 3620" -> "2908 3621" [label="[]", style=dashed]; +"2908 3621" -> "2909 3622" [label="[]", style=solid]; +"2909 3622" -> "2910 3623" [label="[-1, -1]", style=dashed]; +"2910 3623" -> "2911 3624" [label="[-1, -1]", style=dashed]; +"2911 3624" -> "2912 3627" [label="[-1]", style=dashed]; +"2911 3624" -> "2918 3635" [label="[-1]", style=dashed]; +"2912 3627" -> "2914 3628" [label="[-1]", style=dashed]; +"2913 3626" -> "2914 3628" [label="[]", style=solid]; +"2914 3628" -> "2915 3637" [label="[]", style=solid]; +"2914 3628" -> "2923 3648" [label="[]", style=solid]; +"2915 3637" -> "2916 3638" [label="[]", style=solid]; +"2916 3638" -> "2920 3641" [label="[]", style=solid]; +"2917 3633" -> "2918 3635" [label="[]", style=solid]; +"2918 3635" -> "2919 3636" [label="[]", style=solid]; +"2918 3635" -> "3316 3646" [label="[]", style=solid]; +"2919 3636" -> "2920 3641" [label="[]", style=solid]; +"2920 3641" -> "2921 3643" [label="[-1, 3]", style=dashed]; +"2921 3643" -> "2922 3644" [label="[-1, 1]", style=dashed]; +"2922 3644" -> "2923 3648" [label="[-1]", style=dashed]; +"2922 3644" -> "3315 3645" [label="[-1]", 
style=dashed]; +"2923 3648" -> "3176 6520" [label="[]", style=solid]; +"2924 3574" -> "2925 3576" [label="[]", style=dashed]; +"2925 3576" -> "2926 3577" [label="[]", style=dashed]; +"2926 3577" -> "2927 3578" [label="[]", style=solid]; +"2927 3578" -> "2928 3579" [label="[-1, -1]", style=dashed]; +"2928 3579" -> "2929 3580" [label="[-1, -1]", style=dashed]; +"2929 3580" -> "2930 3583" [label="[-1]", style=dashed]; +"2929 3580" -> "2936 3591" [label="[-1]", style=dashed]; +"2930 3583" -> "2932 3584" [label="[-1]", style=dashed]; +"2931 3582" -> "2932 3584" [label="[]", style=solid]; +"2932 3584" -> "2933 3593" [label="[]", style=solid]; +"2932 3584" -> "2941 3604" [label="[]", style=solid]; +"2933 3593" -> "2934 3594" [label="[]", style=solid]; +"2934 3594" -> "2938 3597" [label="[]", style=solid]; +"2935 3589" -> "2936 3591" [label="[]", style=solid]; +"2936 3591" -> "2937 3592" [label="[]", style=solid]; +"2936 3591" -> "3318 3602" [label="[]", style=solid]; +"2937 3592" -> "2938 3597" [label="[]", style=solid]; +"2938 3597" -> "2939 3599" [label="[-1, 3]", style=dashed]; +"2939 3599" -> "2940 3600" [label="[-1, 1]", style=dashed]; +"2940 3600" -> "2941 3604" [label="[-1]", style=dashed]; +"2940 3600" -> "3317 3601" [label="[-1]", style=dashed]; +"2941 3604" -> "3176 6520" [label="[]", style=solid]; +"2942 3530" -> "2943 3532" [label="[]", style=dashed]; +"2943 3532" -> "2944 3533" [label="[]", style=dashed]; +"2944 3533" -> "2945 3534" [label="[]", style=solid]; +"2945 3534" -> "2946 3535" [label="[-1, -1]", style=dashed]; +"2946 3535" -> "2947 3536" [label="[-1, -1]", style=dashed]; +"2947 3536" -> "2948 3539" [label="[-1]", style=dashed]; +"2947 3536" -> "2954 3547" [label="[-1]", style=dashed]; +"2948 3539" -> "2950 3540" [label="[-1]", style=dashed]; +"2949 3538" -> "2950 3540" [label="[]", style=solid]; +"2950 3540" -> "2951 3549" [label="[]", style=solid]; +"2950 3540" -> "2959 3560" [label="[]", style=solid]; +"2951 3549" -> "2952 3550" [label="[]", style=solid]; +"2952 3550" -> "2956 3553" [label="[]", style=solid]; +"2953 3545" -> "2954 3547" [label="[]", style=solid]; +"2954 3547" -> "2955 3548" [label="[]", style=solid]; +"2954 3547" -> "3320 3558" [label="[]", style=solid]; +"2955 3548" -> "2956 3553" [label="[]", style=solid]; +"2956 3553" -> "2957 3555" [label="[-1, 3]", style=dashed]; +"2957 3555" -> "2958 3556" [label="[-1, 1]", style=dashed]; +"2958 3556" -> "2959 3560" [label="[-1]", style=dashed]; +"2958 3556" -> "3319 3557" [label="[-1]", style=dashed]; +"2959 3560" -> "3176 6520" [label="[]", style=solid]; +"2960 3486" -> "2961 3488" [label="[]", style=dashed]; +"2961 3488" -> "2962 3489" [label="[]", style=dashed]; +"2962 3489" -> "2963 3490" [label="[]", style=solid]; +"2963 3490" -> "2964 3491" [label="[-1, -1]", style=dashed]; +"2964 3491" -> "2965 3492" [label="[-1, -1]", style=dashed]; +"2965 3492" -> "2966 3495" [label="[-1]", style=dashed]; +"2965 3492" -> "2972 3503" [label="[-1]", style=dashed]; +"2966 3495" -> "2968 3496" [label="[-1]", style=dashed]; +"2967 3494" -> "2968 3496" [label="[]", style=solid]; +"2968 3496" -> "2969 3505" [label="[]", style=solid]; +"2968 3496" -> "2977 3516" [label="[]", style=solid]; +"2969 3505" -> "2970 3506" [label="[]", style=solid]; +"2970 3506" -> "2974 3509" [label="[]", style=solid]; +"2971 3501" -> "2972 3503" [label="[]", style=solid]; +"2972 3503" -> "2973 3504" [label="[]", style=solid]; +"2972 3503" -> "3322 3514" [label="[]", style=solid]; +"2973 3504" -> "2974 3509" [label="[]", style=solid]; +"2974 3509" -> 
"2975 3511" [label="[-1, 3]", style=dashed]; +"2975 3511" -> "2976 3512" [label="[-1, 1]", style=dashed]; +"2976 3512" -> "2977 3516" [label="[-1]", style=dashed]; +"2976 3512" -> "3321 3513" [label="[-1]", style=dashed]; +"2977 3516" -> "3176 6520" [label="[]", style=solid]; +"2978 3442" -> "2979 3444" [label="[]", style=dashed]; +"2979 3444" -> "2980 3445" [label="[]", style=dashed]; +"2980 3445" -> "2981 3446" [label="[]", style=solid]; +"2981 3446" -> "2982 3447" [label="[-1, -1]", style=dashed]; +"2982 3447" -> "2983 3448" [label="[-1, -1]", style=dashed]; +"2983 3448" -> "2984 3451" [label="[-1]", style=dashed]; +"2983 3448" -> "2990 3459" [label="[-1]", style=dashed]; +"2984 3451" -> "2986 3452" [label="[-1]", style=dashed]; +"2985 3450" -> "2986 3452" [label="[]", style=solid]; +"2986 3452" -> "2987 3461" [label="[]", style=solid]; +"2986 3452" -> "2995 3472" [label="[]", style=solid]; +"2987 3461" -> "2988 3462" [label="[]", style=solid]; +"2988 3462" -> "2992 3465" [label="[]", style=solid]; +"2989 3457" -> "2990 3459" [label="[]", style=solid]; +"2990 3459" -> "2991 3460" [label="[]", style=solid]; +"2990 3459" -> "3324 3470" [label="[]", style=solid]; +"2991 3460" -> "2992 3465" [label="[]", style=solid]; +"2992 3465" -> "2993 3467" [label="[-1, 3]", style=dashed]; +"2993 3467" -> "2994 3468" [label="[-1, 1]", style=dashed]; +"2994 3468" -> "2995 3472" [label="[-1]", style=dashed]; +"2994 3468" -> "3323 3469" [label="[-1]", style=dashed]; +"2995 3472" -> "3176 6520" [label="[]", style=solid]; +"2996 3398" -> "2997 3400" [label="[]", style=dashed]; +"2997 3400" -> "2998 3401" [label="[]", style=dashed]; +"2998 3401" -> "2999 3402" [label="[]", style=solid]; +"2999 3402" -> "3000 3403" [label="[-1, -1]", style=dashed]; +"3000 3403" -> "3001 3404" [label="[-1, -1]", style=dashed]; +"3001 3404" -> "3002 3407" [label="[-1]", style=dashed]; +"3001 3404" -> "3008 3415" [label="[-1]", style=dashed]; +"3002 3407" -> "3004 3408" [label="[-1]", style=dashed]; +"3003 3406" -> "3004 3408" [label="[]", style=solid]; +"3004 3408" -> "3005 3417" [label="[]", style=solid]; +"3004 3408" -> "3013 3428" [label="[]", style=solid]; +"3005 3417" -> "3006 3418" [label="[]", style=solid]; +"3006 3418" -> "3010 3421" [label="[]", style=solid]; +"3007 3413" -> "3008 3415" [label="[]", style=solid]; +"3008 3415" -> "3009 3416" [label="[]", style=solid]; +"3008 3415" -> "3326 3426" [label="[]", style=solid]; +"3009 3416" -> "3010 3421" [label="[]", style=solid]; +"3010 3421" -> "3011 3423" [label="[-1, 3]", style=dashed]; +"3011 3423" -> "3012 3424" [label="[-1, 1]", style=dashed]; +"3012 3424" -> "3013 3428" [label="[-1]", style=dashed]; +"3012 3424" -> "3325 3425" [label="[-1]", style=dashed]; +"3013 3428" -> "3176 6520" [label="[]", style=solid]; +"3014 3354" -> "3015 3356" [label="[]", style=dashed]; +"3015 3356" -> "3016 3357" [label="[]", style=dashed]; +"3016 3357" -> "3017 3358" [label="[]", style=solid]; +"3017 3358" -> "3018 3359" [label="[-1, -1]", style=dashed]; +"3018 3359" -> "3019 3360" [label="[-1, -1]", style=dashed]; +"3019 3360" -> "3020 3363" [label="[-1]", style=dashed]; +"3019 3360" -> "3026 3371" [label="[-1]", style=dashed]; +"3020 3363" -> "3022 3364" [label="[-1]", style=dashed]; +"3021 3362" -> "3022 3364" [label="[]", style=solid]; +"3022 3364" -> "3023 3373" [label="[]", style=solid]; +"3022 3364" -> "3031 3384" [label="[]", style=solid]; +"3023 3373" -> "3024 3374" [label="[]", style=solid]; +"3024 3374" -> "3028 3377" [label="[]", style=solid]; +"3025 3369" -> "3026 3371" 
[label="[]", style=solid]; +"3026 3371" -> "3027 3372" [label="[]", style=solid]; +"3026 3371" -> "3328 3382" [label="[]", style=solid]; +"3027 3372" -> "3028 3377" [label="[]", style=solid]; +"3028 3377" -> "3029 3379" [label="[-1, 3]", style=dashed]; +"3029 3379" -> "3030 3380" [label="[-1, 1]", style=dashed]; +"3030 3380" -> "3031 3384" [label="[-1]", style=dashed]; +"3030 3380" -> "3327 3381" [label="[-1]", style=dashed]; +"3031 3384" -> "3176 6520" [label="[]", style=solid]; +"3032 3310" -> "3033 3312" [label="[]", style=dashed]; +"3033 3312" -> "3034 3313" [label="[]", style=dashed]; +"3034 3313" -> "3035 3314" [label="[]", style=solid]; +"3035 3314" -> "3036 3315" [label="[-1, -1]", style=dashed]; +"3036 3315" -> "3037 3316" [label="[-1, -1]", style=dashed]; +"3037 3316" -> "3038 3319" [label="[-1]", style=dashed]; +"3037 3316" -> "3044 3327" [label="[-1]", style=dashed]; +"3038 3319" -> "3040 3320" [label="[-1]", style=dashed]; +"3039 3318" -> "3040 3320" [label="[]", style=solid]; +"3040 3320" -> "3041 3329" [label="[]", style=solid]; +"3040 3320" -> "3049 3340" [label="[]", style=solid]; +"3041 3329" -> "3042 3330" [label="[]", style=solid]; +"3042 3330" -> "3046 3333" [label="[]", style=solid]; +"3043 3325" -> "3044 3327" [label="[]", style=solid]; +"3044 3327" -> "3045 3328" [label="[]", style=solid]; +"3044 3327" -> "3330 3338" [label="[]", style=solid]; +"3045 3328" -> "3046 3333" [label="[]", style=solid]; +"3046 3333" -> "3047 3335" [label="[-1, 3]", style=dashed]; +"3047 3335" -> "3048 3336" [label="[-1, 1]", style=dashed]; +"3048 3336" -> "3049 3340" [label="[-1]", style=dashed]; +"3048 3336" -> "3329 3337" [label="[-1]", style=dashed]; +"3049 3340" -> "3176 6520" [label="[]", style=solid]; +"3050 3266" -> "3051 3268" [label="[]", style=dashed]; +"3051 3268" -> "3052 3269" [label="[]", style=dashed]; +"3052 3269" -> "3053 3270" [label="[]", style=solid]; +"3053 3270" -> "3054 3271" [label="[-1, -1]", style=dashed]; +"3054 3271" -> "3055 3272" [label="[-1, -1]", style=dashed]; +"3055 3272" -> "3056 3275" [label="[-1]", style=dashed]; +"3055 3272" -> "3062 3283" [label="[-1]", style=dashed]; +"3056 3275" -> "3058 3276" [label="[-1]", style=dashed]; +"3057 3274" -> "3058 3276" [label="[]", style=solid]; +"3058 3276" -> "3059 3285" [label="[]", style=solid]; +"3058 3276" -> "3067 3296" [label="[]", style=solid]; +"3059 3285" -> "3060 3286" [label="[]", style=solid]; +"3060 3286" -> "3064 3289" [label="[]", style=solid]; +"3061 3281" -> "3062 3283" [label="[]", style=solid]; +"3062 3283" -> "3063 3284" [label="[]", style=solid]; +"3062 3283" -> "3332 3294" [label="[]", style=solid]; +"3063 3284" -> "3064 3289" [label="[]", style=solid]; +"3064 3289" -> "3065 3291" [label="[-1, 3]", style=dashed]; +"3065 3291" -> "3066 3292" [label="[-1, 1]", style=dashed]; +"3066 3292" -> "3067 3296" [label="[-1]", style=dashed]; +"3066 3292" -> "3331 3293" [label="[-1]", style=dashed]; +"3067 3296" -> "3176 6520" [label="[]", style=solid]; +"3068 3222" -> "3069 3224" [label="[]", style=dashed]; +"3069 3224" -> "3070 3225" [label="[]", style=dashed]; +"3070 3225" -> "3071 3226" [label="[]", style=solid]; +"3071 3226" -> "3072 3227" [label="[-1, -1]", style=dashed]; +"3072 3227" -> "3073 3228" [label="[-1, -1]", style=dashed]; +"3073 3228" -> "3074 3231" [label="[-1]", style=dashed]; +"3073 3228" -> "3080 3239" [label="[-1]", style=dashed]; +"3074 3231" -> "3076 3232" [label="[-1]", style=dashed]; +"3075 3230" -> "3076 3232" [label="[]", style=solid]; +"3076 3232" -> "3077 3241" [label="[]", 
style=solid]; +"3076 3232" -> "3085 3252" [label="[]", style=solid]; +"3077 3241" -> "3078 3242" [label="[]", style=solid]; +"3078 3242" -> "3082 3245" [label="[]", style=solid]; +"3079 3237" -> "3080 3239" [label="[]", style=solid]; +"3080 3239" -> "3081 3240" [label="[]", style=solid]; +"3080 3239" -> "3334 3250" [label="[]", style=solid]; +"3081 3240" -> "3082 3245" [label="[]", style=solid]; +"3082 3245" -> "3083 3247" [label="[-1, 3]", style=dashed]; +"3083 3247" -> "3084 3248" [label="[-1, 1]", style=dashed]; +"3084 3248" -> "3085 3252" [label="[-1]", style=dashed]; +"3084 3248" -> "3333 3249" [label="[-1]", style=dashed]; +"3085 3252" -> "3176 6520" [label="[]", style=solid]; +"3086 3178" -> "3087 3180" [label="[]", style=dashed]; +"3087 3180" -> "3088 3181" [label="[]", style=dashed]; +"3088 3181" -> "3089 3182" [label="[]", style=solid]; +"3089 3182" -> "3090 3183" [label="[-1, -1]", style=dashed]; +"3090 3183" -> "3091 3184" [label="[-1, -1]", style=dashed]; +"3091 3184" -> "3092 3187" [label="[-1]", style=dashed]; +"3091 3184" -> "3098 3195" [label="[-1]", style=dashed]; +"3092 3187" -> "3094 3188" [label="[-1]", style=dashed]; +"3093 3186" -> "3094 3188" [label="[]", style=solid]; +"3094 3188" -> "3095 3197" [label="[]", style=solid]; +"3094 3188" -> "3103 3208" [label="[]", style=solid]; +"3095 3197" -> "3096 3198" [label="[]", style=solid]; +"3096 3198" -> "3100 3201" [label="[]", style=solid]; +"3097 3193" -> "3098 3195" [label="[]", style=solid]; +"3098 3195" -> "3099 3196" [label="[]", style=solid]; +"3098 3195" -> "3336 3206" [label="[]", style=solid]; +"3099 3196" -> "3100 3201" [label="[]", style=solid]; +"3100 3201" -> "3101 3203" [label="[-1, 3]", style=dashed]; +"3101 3203" -> "3102 3204" [label="[-1, 1]", style=dashed]; +"3102 3204" -> "3103 3208" [label="[-1]", style=dashed]; +"3102 3204" -> "3335 3205" [label="[-1]", style=dashed]; +"3103 3208" -> "3176 6520" [label="[]", style=solid]; +"3104 3134" -> "3105 3136" [label="[]", style=dashed]; +"3105 3136" -> "3106 3137" [label="[]", style=dashed]; +"3106 3137" -> "3107 3138" [label="[]", style=solid]; +"3107 3138" -> "3108 3139" [label="[-1, -1]", style=dashed]; +"3108 3139" -> "3109 3140" [label="[-1, -1]", style=dashed]; +"3109 3140" -> "3110 3143" [label="[-1]", style=dashed]; +"3109 3140" -> "3116 3151" [label="[-1]", style=dashed]; +"3110 3143" -> "3112 3144" [label="[-1]", style=dashed]; +"3111 3142" -> "3112 3144" [label="[]", style=solid]; +"3112 3144" -> "3113 3153" [label="[]", style=solid]; +"3112 3144" -> "3121 3164" [label="[]", style=solid]; +"3113 3153" -> "3114 3154" [label="[]", style=solid]; +"3114 3154" -> "3118 3157" [label="[]", style=solid]; +"3115 3149" -> "3116 3151" [label="[]", style=solid]; +"3116 3151" -> "3117 3152" [label="[]", style=solid]; +"3116 3151" -> "3338 3162" [label="[]", style=solid]; +"3117 3152" -> "3118 3157" [label="[]", style=solid]; +"3118 3157" -> "3119 3159" [label="[-1, 3]", style=dashed]; +"3119 3159" -> "3120 3160" [label="[-1, 1]", style=dashed]; +"3120 3160" -> "3121 3164" [label="[-1]", style=dashed]; +"3120 3160" -> "3337 3161" [label="[-1]", style=dashed]; +"3121 3164" -> "3176 6520" [label="[]", style=solid]; +"3122 3090" -> "3123 3092" [label="[]", style=dashed]; +"3123 3092" -> "3124 3093" [label="[]", style=dashed]; +"3124 3093" -> "3125 3094" [label="[]", style=solid]; +"3125 3094" -> "3126 3095" [label="[-1, -1]", style=dashed]; +"3126 3095" -> "3127 3096" [label="[-1, -1]", style=dashed]; +"3127 3096" -> "3128 3099" [label="[-1]", style=dashed]; +"3127 
3096" -> "3134 3107" [label="[-1]", style=dashed]; +"3128 3099" -> "3130 3100" [label="[-1]", style=dashed]; +"3129 3098" -> "3130 3100" [label="[]", style=solid]; +"3130 3100" -> "3131 3109" [label="[]", style=solid]; +"3130 3100" -> "3139 3120" [label="[]", style=solid]; +"3131 3109" -> "3132 3110" [label="[]", style=solid]; +"3132 3110" -> "3136 3113" [label="[]", style=solid]; +"3133 3105" -> "3134 3107" [label="[]", style=solid]; +"3134 3107" -> "3135 3108" [label="[]", style=solid]; +"3134 3107" -> "3340 3118" [label="[]", style=solid]; +"3135 3108" -> "3136 3113" [label="[]", style=solid]; +"3136 3113" -> "3137 3115" [label="[-1, 3]", style=dashed]; +"3137 3115" -> "3138 3116" [label="[-1, 1]", style=dashed]; +"3138 3116" -> "3139 3120" [label="[-1]", style=dashed]; +"3138 3116" -> "3339 3117" [label="[-1]", style=dashed]; +"3139 3120" -> "3176 6520" [label="[]", style=solid]; +"3140 3046" -> "3141 3048" [label="[]", style=dashed]; +"3141 3048" -> "3142 3049" [label="[]", style=dashed]; +"3142 3049" -> "3143 3050" [label="[]", style=solid]; +"3143 3050" -> "3144 3051" [label="[-1, -1]", style=dashed]; +"3144 3051" -> "3145 3052" [label="[-1, -1]", style=dashed]; +"3145 3052" -> "3146 3055" [label="[-1]", style=dashed]; +"3145 3052" -> "3152 3063" [label="[-1]", style=dashed]; +"3146 3055" -> "3148 3056" [label="[-1]", style=dashed]; +"3147 3054" -> "3148 3056" [label="[]", style=solid]; +"3148 3056" -> "3149 3065" [label="[]", style=solid]; +"3148 3056" -> "3157 3076" [label="[]", style=solid]; +"3149 3065" -> "3150 3066" [label="[]", style=solid]; +"3150 3066" -> "3154 3069" [label="[]", style=solid]; +"3151 3061" -> "3152 3063" [label="[]", style=solid]; +"3152 3063" -> "3153 3064" [label="[]", style=solid]; +"3152 3063" -> "3342 3074" [label="[]", style=solid]; +"3153 3064" -> "3154 3069" [label="[]", style=solid]; +"3154 3069" -> "3155 3071" [label="[-1, 3]", style=dashed]; +"3155 3071" -> "3156 3072" [label="[-1, 1]", style=dashed]; +"3156 3072" -> "3157 3076" [label="[-1]", style=dashed]; +"3156 3072" -> "3341 3073" [label="[-1]", style=dashed]; +"3157 3076" -> "3176 6520" [label="[]", style=solid]; +"3158 3002" -> "3159 3004" [label="[]", style=dashed]; +"3159 3004" -> "3160 3005" [label="[]", style=dashed]; +"3160 3005" -> "3161 3006" [label="[]", style=solid]; +"3161 3006" -> "3162 3007" [label="[-1, -1]", style=dashed]; +"3162 3007" -> "3163 3008" [label="[-1, -1]", style=dashed]; +"3163 3008" -> "3164 3011" [label="[-1]", style=dashed]; +"3163 3008" -> "3170 3019" [label="[-1]", style=dashed]; +"3164 3011" -> "3166 3012" [label="[-1]", style=dashed]; +"3165 3010" -> "3166 3012" [label="[]", style=solid]; +"3166 3012" -> "3167 3021" [label="[]", style=solid]; +"3166 3012" -> "3175 3032" [label="[]", style=solid]; +"3167 3021" -> "3168 3022" [label="[]", style=solid]; +"3168 3022" -> "3172 3025" [label="[]", style=solid]; +"3169 3017" -> "3170 3019" [label="[]", style=solid]; +"3170 3019" -> "3171 3020" [label="[]", style=solid]; +"3170 3019" -> "3344 3030" [label="[]", style=solid]; +"3171 3020" -> "3172 3025" [label="[]", style=solid]; +"3172 3025" -> "3173 3027" [label="[-1, 3]", style=dashed]; +"3173 3027" -> "3174 3028" [label="[-1, 1]", style=dashed]; +"3174 3028" -> "3175 3032" [label="[-1]", style=dashed]; +"3174 3028" -> "3343 3029" [label="[-1]", style=dashed]; +"3175 3032" -> "3176 6520" [label="[]", style=solid]; +"3176 6520" -> "3177 6521" [label="[]", style=solid]; +"3176 6520" -> "3183 6528" [label="[]", style=solid]; +"3176 6520" -> "3885 6534" [label="[]", 
style=solid]; +"3177 6521" -> "3178 6523" [label="[-1]", style=dashed]; +"3178 6523" -> "3179 6524" [label="[-1]", style=dashed]; +"3179 6524" -> "3180 6525" [label="[-1]", style=dashed]; "3180 6525" -> "3181 6526" [label="[]", style=dashed]; "3181 6526" -> "3182 6527" [label="[]", style=dashed]; "3182 6527" -> "3183 6528" [label="[1]", style=dashed]; -"3183 6528" -> "3184 6529" [label="[1]", style=dashed]; -"3183 6528" -> "3870 6532" [label="[1]", style=dashed]; -"3183 6528" -> "3884 6533" [label="[1]", style=dashed]; -"3184 6529" -> "3346 6530" [label="[1]", style=dashed]; -"3185 6505" -> "3186 6506" [label="[1]", style=dashed]; -"3186 6506" -> "3345 6518" [label="[1]", style=solid]; -"3186 6506" -> "3549 6513" [label="[1]", style=solid]; -"3187 6461" -> "3188 6462" [label="[1]", style=dashed]; -"3188 6462" -> "3345 6518" [label="[1]", style=solid]; -"3188 6462" -> "3553 6469" [label="[1]", style=solid]; -"3189 6417" -> "3190 6418" [label="[1]", style=dashed]; -"3190 6418" -> "3345 6518" [label="[1]", style=solid]; -"3190 6418" -> "3557 6425" [label="[1]", style=solid]; -"3191 6373" -> "3192 6374" [label="[1]", style=dashed]; -"3192 6374" -> "3345 6518" [label="[1]", style=solid]; -"3192 6374" -> "3561 6381" [label="[1]", style=solid]; -"3193 6329" -> "3194 6330" [label="[1]", style=dashed]; -"3194 6330" -> "3345 6518" [label="[1]", style=solid]; -"3194 6330" -> "3565 6337" [label="[1]", style=solid]; -"3195 6285" -> "3196 6286" [label="[1]", style=dashed]; -"3196 6286" -> "3345 6518" [label="[1]", style=solid]; -"3196 6286" -> "3569 6293" [label="[1]", style=solid]; -"3197 6241" -> "3198 6242" [label="[1]", style=dashed]; -"3198 6242" -> "3345 6518" [label="[1]", style=solid]; -"3198 6242" -> "3573 6249" [label="[1]", style=solid]; -"3199 6197" -> "3200 6198" [label="[1]", style=dashed]; -"3200 6198" -> "3345 6518" [label="[1]", style=solid]; -"3200 6198" -> "3577 6205" [label="[1]", style=solid]; -"3201 6153" -> "3202 6154" [label="[1]", style=dashed]; -"3202 6154" -> "3345 6518" [label="[1]", style=solid]; -"3202 6154" -> "3581 6161" [label="[1]", style=solid]; -"3203 6109" -> "3204 6110" [label="[1]", style=dashed]; -"3204 6110" -> "3345 6518" [label="[1]", style=solid]; -"3204 6110" -> "3585 6117" [label="[1]", style=solid]; -"3205 6065" -> "3206 6066" [label="[1]", style=dashed]; -"3206 6066" -> "3345 6518" [label="[1]", style=solid]; -"3206 6066" -> "3589 6073" [label="[1]", style=solid]; -"3207 6021" -> "3208 6022" [label="[1]", style=dashed]; -"3208 6022" -> "3345 6518" [label="[1]", style=solid]; -"3208 6022" -> "3593 6029" [label="[1]", style=solid]; -"3209 5977" -> "3210 5978" [label="[1]", style=dashed]; -"3210 5978" -> "3345 6518" [label="[1]", style=solid]; -"3210 5978" -> "3597 5985" [label="[1]", style=solid]; -"3211 5933" -> "3212 5934" [label="[1]", style=dashed]; -"3212 5934" -> "3345 6518" [label="[1]", style=solid]; -"3212 5934" -> "3601 5941" [label="[1]", style=solid]; -"3213 5889" -> "3214 5890" [label="[1]", style=dashed]; -"3214 5890" -> "3345 6518" [label="[1]", style=solid]; -"3214 5890" -> "3605 5897" [label="[1]", style=solid]; -"3215 5845" -> "3216 5846" [label="[1]", style=dashed]; -"3216 5846" -> "3345 6518" [label="[1]", style=solid]; -"3216 5846" -> "3609 5853" [label="[1]", style=solid]; -"3217 5801" -> "3218 5802" [label="[1]", style=dashed]; -"3218 5802" -> "3345 6518" [label="[1]", style=solid]; -"3218 5802" -> "3613 5809" [label="[1]", style=solid]; -"3219 5757" -> "3220 5758" [label="[1]", style=dashed]; -"3220 5758" -> "3345 6518" [label="[1]", 
style=solid]; -"3220 5758" -> "3617 5765" [label="[1]", style=solid]; -"3221 5713" -> "3222 5714" [label="[1]", style=dashed]; -"3222 5714" -> "3345 6518" [label="[1]", style=solid]; -"3222 5714" -> "3621 5721" [label="[1]", style=solid]; -"3223 5669" -> "3224 5670" [label="[1]", style=dashed]; -"3224 5670" -> "3345 6518" [label="[1]", style=solid]; -"3224 5670" -> "3625 5677" [label="[1]", style=solid]; -"3225 5625" -> "3226 5626" [label="[1]", style=dashed]; -"3226 5626" -> "3345 6518" [label="[1]", style=solid]; -"3226 5626" -> "3629 5633" [label="[1]", style=solid]; -"3227 5581" -> "3228 5582" [label="[1]", style=dashed]; -"3228 5582" -> "3345 6518" [label="[1]", style=solid]; -"3228 5582" -> "3633 5589" [label="[1]", style=solid]; -"3229 5537" -> "3230 5538" [label="[1]", style=dashed]; -"3230 5538" -> "3345 6518" [label="[1]", style=solid]; -"3230 5538" -> "3637 5545" [label="[1]", style=solid]; -"3231 5493" -> "3232 5494" [label="[1]", style=dashed]; -"3232 5494" -> "3345 6518" [label="[1]", style=solid]; -"3232 5494" -> "3641 5501" [label="[1]", style=solid]; -"3233 5449" -> "3234 5450" [label="[1]", style=dashed]; -"3234 5450" -> "3345 6518" [label="[1]", style=solid]; -"3234 5450" -> "3645 5457" [label="[1]", style=solid]; -"3235 5405" -> "3236 5406" [label="[1]", style=dashed]; -"3236 5406" -> "3345 6518" [label="[1]", style=solid]; -"3236 5406" -> "3649 5413" [label="[1]", style=solid]; -"3237 5361" -> "3238 5362" [label="[1]", style=dashed]; -"3238 5362" -> "3345 6518" [label="[1]", style=solid]; -"3238 5362" -> "3653 5369" [label="[1]", style=solid]; -"3239 5317" -> "3240 5318" [label="[1]", style=dashed]; -"3240 5318" -> "3345 6518" [label="[1]", style=solid]; -"3240 5318" -> "3657 5325" [label="[1]", style=solid]; -"3241 5273" -> "3242 5274" [label="[1]", style=dashed]; -"3242 5274" -> "3345 6518" [label="[1]", style=solid]; -"3242 5274" -> "3661 5281" [label="[1]", style=solid]; -"3243 5229" -> "3244 5230" [label="[1]", style=dashed]; -"3244 5230" -> "3345 6518" [label="[1]", style=solid]; -"3244 5230" -> "3665 5237" [label="[1]", style=solid]; -"3245 5185" -> "3246 5186" [label="[1]", style=dashed]; -"3246 5186" -> "3345 6518" [label="[1]", style=solid]; -"3246 5186" -> "3669 5193" [label="[1]", style=solid]; -"3247 5141" -> "3248 5142" [label="[1]", style=dashed]; -"3248 5142" -> "3345 6518" [label="[1]", style=solid]; -"3248 5142" -> "3673 5149" [label="[1]", style=solid]; -"3249 5097" -> "3250 5098" [label="[1]", style=dashed]; -"3250 5098" -> "3345 6518" [label="[1]", style=solid]; -"3250 5098" -> "3677 5105" [label="[1]", style=solid]; -"3251 5053" -> "3252 5054" [label="[1]", style=dashed]; -"3252 5054" -> "3345 6518" [label="[1]", style=solid]; -"3252 5054" -> "3681 5061" [label="[1]", style=solid]; -"3253 5009" -> "3254 5010" [label="[1]", style=dashed]; -"3254 5010" -> "3345 6518" [label="[1]", style=solid]; -"3254 5010" -> "3685 5017" [label="[1]", style=solid]; -"3255 4965" -> "3256 4966" [label="[1]", style=dashed]; -"3256 4966" -> "3345 6518" [label="[1]", style=solid]; -"3256 4966" -> "3689 4973" [label="[1]", style=solid]; -"3257 4921" -> "3258 4922" [label="[1]", style=dashed]; -"3258 4922" -> "3345 6518" [label="[1]", style=solid]; -"3258 4922" -> "3693 4929" [label="[1]", style=solid]; -"3259 4877" -> "3260 4878" [label="[1]", style=dashed]; -"3260 4878" -> "3345 6518" [label="[1]", style=solid]; -"3260 4878" -> "3697 4885" [label="[1]", style=solid]; -"3261 4833" -> "3262 4834" [label="[1]", style=dashed]; -"3262 4834" -> "3345 6518" [label="[1]", 
style=solid]; -"3262 4834" -> "3701 4841" [label="[1]", style=solid]; -"3263 4789" -> "3264 4790" [label="[1]", style=dashed]; -"3264 4790" -> "3345 6518" [label="[1]", style=solid]; -"3264 4790" -> "3705 4797" [label="[1]", style=solid]; -"3265 4745" -> "3266 4746" [label="[1]", style=dashed]; -"3266 4746" -> "3345 6518" [label="[1]", style=solid]; -"3266 4746" -> "3709 4753" [label="[1]", style=solid]; -"3267 4701" -> "3268 4702" [label="[1]", style=dashed]; -"3268 4702" -> "3345 6518" [label="[1]", style=solid]; -"3268 4702" -> "3713 4709" [label="[1]", style=solid]; -"3269 4657" -> "3270 4658" [label="[1]", style=dashed]; -"3270 4658" -> "3345 6518" [label="[1]", style=solid]; -"3270 4658" -> "3717 4665" [label="[1]", style=solid]; -"3271 4613" -> "3272 4614" [label="[1]", style=dashed]; -"3272 4614" -> "3345 6518" [label="[1]", style=solid]; -"3272 4614" -> "3721 4621" [label="[1]", style=solid]; -"3273 4569" -> "3274 4570" [label="[1]", style=dashed]; -"3274 4570" -> "3345 6518" [label="[1]", style=solid]; -"3274 4570" -> "3725 4577" [label="[1]", style=solid]; -"3275 4525" -> "3276 4526" [label="[1]", style=dashed]; -"3276 4526" -> "3345 6518" [label="[1]", style=solid]; -"3276 4526" -> "3729 4533" [label="[1]", style=solid]; -"3277 4481" -> "3278 4482" [label="[1]", style=dashed]; -"3278 4482" -> "3345 6518" [label="[1]", style=solid]; -"3278 4482" -> "3733 4489" [label="[1]", style=solid]; -"3279 4437" -> "3280 4438" [label="[1]", style=dashed]; -"3280 4438" -> "3345 6518" [label="[1]", style=solid]; -"3280 4438" -> "3737 4445" [label="[1]", style=solid]; -"3281 4393" -> "3282 4394" [label="[1]", style=dashed]; -"3282 4394" -> "3345 6518" [label="[1]", style=solid]; -"3282 4394" -> "3741 4401" [label="[1]", style=solid]; -"3283 4349" -> "3284 4350" [label="[1]", style=dashed]; -"3284 4350" -> "3345 6518" [label="[1]", style=solid]; -"3284 4350" -> "3745 4357" [label="[1]", style=solid]; -"3285 4305" -> "3286 4306" [label="[1]", style=dashed]; -"3286 4306" -> "3345 6518" [label="[1]", style=solid]; -"3286 4306" -> "3749 4313" [label="[1]", style=solid]; -"3287 4261" -> "3288 4262" [label="[1]", style=dashed]; -"3288 4262" -> "3345 6518" [label="[1]", style=solid]; -"3288 4262" -> "3753 4269" [label="[1]", style=solid]; -"3289 4217" -> "3290 4218" [label="[1]", style=dashed]; -"3290 4218" -> "3345 6518" [label="[1]", style=solid]; -"3290 4218" -> "3757 4225" [label="[1]", style=solid]; -"3291 4173" -> "3292 4174" [label="[1]", style=dashed]; -"3292 4174" -> "3345 6518" [label="[1]", style=solid]; -"3292 4174" -> "3761 4181" [label="[1]", style=solid]; -"3293 4129" -> "3294 4130" [label="[1]", style=dashed]; -"3294 4130" -> "3345 6518" [label="[1]", style=solid]; -"3294 4130" -> "3765 4137" [label="[1]", style=solid]; -"3295 4085" -> "3296 4086" [label="[1]", style=dashed]; -"3296 4086" -> "3345 6518" [label="[1]", style=solid]; -"3296 4086" -> "3769 4093" [label="[1]", style=solid]; -"3297 4041" -> "3298 4042" [label="[1]", style=dashed]; -"3298 4042" -> "3345 6518" [label="[1]", style=solid]; -"3298 4042" -> "3773 4049" [label="[1]", style=solid]; -"3299 3997" -> "3300 3998" [label="[1]", style=dashed]; -"3300 3998" -> "3345 6518" [label="[1]", style=solid]; -"3300 3998" -> "3777 4005" [label="[1]", style=solid]; -"3301 3953" -> "3302 3954" [label="[1]", style=dashed]; -"3302 3954" -> "3345 6518" [label="[1]", style=solid]; -"3302 3954" -> "3781 3961" [label="[1]", style=solid]; -"3303 3909" -> "3304 3910" [label="[1]", style=dashed]; -"3304 3910" -> "3345 6518" [label="[1]", 
style=solid]; -"3304 3910" -> "3785 3917" [label="[1]", style=solid]; -"3305 3865" -> "3306 3866" [label="[1]", style=dashed]; -"3306 3866" -> "3345 6518" [label="[1]", style=solid]; -"3306 3866" -> "3789 3873" [label="[1]", style=solid]; -"3307 3821" -> "3308 3822" [label="[1]", style=dashed]; -"3308 3822" -> "3345 6518" [label="[1]", style=solid]; -"3308 3822" -> "3793 3829" [label="[1]", style=solid]; -"3309 3777" -> "3310 3778" [label="[1]", style=dashed]; -"3310 3778" -> "3345 6518" [label="[1]", style=solid]; -"3310 3778" -> "3797 3785" [label="[1]", style=solid]; -"3311 3733" -> "3312 3734" [label="[1]", style=dashed]; -"3312 3734" -> "3345 6518" [label="[1]", style=solid]; -"3312 3734" -> "3801 3741" [label="[1]", style=solid]; -"3313 3689" -> "3314 3690" [label="[1]", style=dashed]; -"3314 3690" -> "3345 6518" [label="[1]", style=solid]; -"3314 3690" -> "3805 3697" [label="[1]", style=solid]; -"3315 3645" -> "3316 3646" [label="[1]", style=dashed]; -"3316 3646" -> "3345 6518" [label="[1]", style=solid]; -"3316 3646" -> "3809 3653" [label="[1]", style=solid]; -"3317 3601" -> "3318 3602" [label="[1]", style=dashed]; -"3318 3602" -> "3345 6518" [label="[1]", style=solid]; -"3318 3602" -> "3813 3609" [label="[1]", style=solid]; -"3319 3557" -> "3320 3558" [label="[1]", style=dashed]; -"3320 3558" -> "3345 6518" [label="[1]", style=solid]; -"3320 3558" -> "3817 3565" [label="[1]", style=solid]; -"3321 3513" -> "3322 3514" [label="[1]", style=dashed]; -"3322 3514" -> "3345 6518" [label="[1]", style=solid]; -"3322 3514" -> "3821 3521" [label="[1]", style=solid]; -"3323 3469" -> "3324 3470" [label="[1]", style=dashed]; -"3324 3470" -> "3345 6518" [label="[1]", style=solid]; -"3324 3470" -> "3825 3477" [label="[1]", style=solid]; -"3325 3425" -> "3326 3426" [label="[1]", style=dashed]; -"3326 3426" -> "3345 6518" [label="[1]", style=solid]; -"3326 3426" -> "3829 3433" [label="[1]", style=solid]; -"3327 3381" -> "3328 3382" [label="[1]", style=dashed]; -"3328 3382" -> "3345 6518" [label="[1]", style=solid]; -"3328 3382" -> "3833 3389" [label="[1]", style=solid]; -"3329 3337" -> "3330 3338" [label="[1]", style=dashed]; -"3330 3338" -> "3345 6518" [label="[1]", style=solid]; -"3330 3338" -> "3837 3345" [label="[1]", style=solid]; -"3331 3293" -> "3332 3294" [label="[1]", style=dashed]; -"3332 3294" -> "3345 6518" [label="[1]", style=solid]; -"3332 3294" -> "3841 3301" [label="[1]", style=solid]; -"3333 3249" -> "3334 3250" [label="[1]", style=dashed]; -"3334 3250" -> "3345 6518" [label="[1]", style=solid]; -"3334 3250" -> "3845 3257" [label="[1]", style=solid]; -"3335 3205" -> "3336 3206" [label="[1]", style=dashed]; -"3336 3206" -> "3345 6518" [label="[1]", style=solid]; -"3336 3206" -> "3849 3213" [label="[1]", style=solid]; -"3337 3161" -> "3338 3162" [label="[1]", style=dashed]; -"3338 3162" -> "3345 6518" [label="[1]", style=solid]; -"3338 3162" -> "3853 3169" [label="[1]", style=solid]; -"3339 3117" -> "3340 3118" [label="[1]", style=dashed]; -"3340 3118" -> "3345 6518" [label="[1]", style=solid]; -"3340 3118" -> "3857 3125" [label="[1]", style=solid]; -"3341 3073" -> "3342 3074" [label="[1]", style=dashed]; -"3342 3074" -> "3345 6518" [label="[1]", style=solid]; -"3342 3074" -> "3861 3081" [label="[1]", style=solid]; -"3343 3029" -> "3344 3030" [label="[1]", style=dashed]; -"3344 3030" -> "3345 6518" [label="[1]", style=solid]; -"3344 3030" -> "3865 3037" [label="[1]", style=solid]; -"3345 6518" -> "3346 6530" [label="[1]", style=solid]; -"3346 6530" -> "3347 6576" [label="[1, 4]", 
style=solid]; -"3346 6530" -> "3349 6569" [label="[1, 4]", style=solid]; -"3346 6530" -> "3353 6559" [label="[1, 4]", style=solid]; -"3346 6530" -> "3355 6552" [label="[1, 4]", style=solid]; -"3346 6530" -> "3376 6539" [label="[1, 4]", style=solid]; -"3346 6530" -> "3380 6547" [label="[1, 4]", style=solid]; -"3346 6530" -> "3887 nncf_model_output_0" [label="[1, 4]", style=solid]; -"3347 6576" -> "3348 6578" [label="[1, 4]", style=solid]; -"3348 6578" -> "3351 6579" [label="[1]", style=solid]; -"3349 6569" -> "3350 6571" [label="[1, 4]", style=solid]; -"3350 6571" -> "3351 6579" [label="[1]", style=solid]; -"3351 6579" -> "3352 6581" [label="[1]", style=solid]; -"3352 6581" -> "3359 6582" [label="[1]", style=solid]; -"3353 6559" -> "3354 6561" [label="[1, 4]", style=solid]; -"3354 6561" -> "3357 6562" [label="[1]", style=solid]; -"3355 6552" -> "3356 6554" [label="[1, 4]", style=solid]; -"3356 6554" -> "3357 6562" [label="[1]", style=solid]; -"3357 6562" -> "3358 6564" [label="[1]", style=solid]; -"3358 6564" -> "3359 6582" [label="[1]", style=solid]; -"3359 6582" -> "3360 6583" [label="[1]", style=solid]; -"3360 6583" -> "3361 6586" [label="[1]", style=solid]; -"3361 6586" -> "3362 6587" [label="[1]", style=solid]; -"3362 6587" -> "3363 6588" [label="[1]", style=solid]; -"3363 6588" -> "3364 6590" [label="[1]", style=solid]; -"3364 6590" -> "3365 6592" [label="[1]", style=solid]; -"3365 6592" -> "3366 6593" [label="[1]", style=solid]; -"3366 6593" -> "3367 6594" [label="[1]", style=solid]; -"3367 6594" -> "3368 6595" [label="[1]", style=solid]; -"3368 6595" -> "3369 6597" [label="[1]", style=dashed]; -"3369 6597" -> "3370 6599" [label="[1]", style=dashed]; -"3369 6597" -> "3397 6685" [label="[1]", style=dashed]; -"3369 6597" -> "3417 6667" [label="[1]", style=dashed]; -"3369 6597" -> "3423 6616" [label="[1]", style=dashed]; -"3369 6597" -> "3445 6713" [label="[1]", style=dashed]; -"3369 6597" -> "3457 6633" [label="[1]", style=dashed]; -"3369 6597" -> "3479 6741" [label="[1]", style=dashed]; -"3369 6597" -> "3491 6650" [label="[1]", style=dashed]; -"3369 6597" -> "3513 6769" [label="[1]", style=dashed]; -"3370 6599" -> "3371 6601" [label="[1]", style=dashed]; -"3371 6601" -> "3372 6602" [label="[1]", style=solid]; -"3372 6602" -> "3373 6603" [label="[1, 1]", style=dashed]; -"3373 6603" -> "3374 6604" [label="[1, 1]", style=dashed]; -"3374 6604" -> "3375 6605" [label="[1]", style=dashed]; -"3375 6605" -> "3381 6606" [label="[1]", style=dashed]; -"3376 6539" -> "3377 6544" [label="[1, 4]", style=solid]; -"3377 6544" -> "3378 6545" [label="[1, 1]", style=solid]; +"3183 6528" -> "3184 6529" [label="[]", style=dashed]; +"3183 6528" -> "3870 6532" [label="[]", style=dashed]; +"3183 6528" -> "3884 6533" [label="[]", style=dashed]; +"3184 6529" -> "3346 6530" [label="[]", style=dashed]; +"3185 6505" -> "3186 6506" [label="[-1]", style=dashed]; +"3186 6506" -> "3345 6518" [label="[]", style=solid]; +"3186 6506" -> "3549 6513" [label="[]", style=solid]; +"3187 6461" -> "3188 6462" [label="[-1]", style=dashed]; +"3188 6462" -> "3345 6518" [label="[]", style=solid]; +"3188 6462" -> "3553 6469" [label="[]", style=solid]; +"3189 6417" -> "3190 6418" [label="[-1]", style=dashed]; +"3190 6418" -> "3345 6518" [label="[]", style=solid]; +"3190 6418" -> "3557 6425" [label="[]", style=solid]; +"3191 6373" -> "3192 6374" [label="[-1]", style=dashed]; +"3192 6374" -> "3345 6518" [label="[]", style=solid]; +"3192 6374" -> "3561 6381" [label="[]", style=solid]; +"3193 6329" -> "3194 6330" [label="[-1]", 
style=dashed]; +"3194 6330" -> "3345 6518" [label="[]", style=solid]; +"3194 6330" -> "3565 6337" [label="[]", style=solid]; +"3195 6285" -> "3196 6286" [label="[-1]", style=dashed]; +"3196 6286" -> "3345 6518" [label="[]", style=solid]; +"3196 6286" -> "3569 6293" [label="[]", style=solid]; +"3197 6241" -> "3198 6242" [label="[-1]", style=dashed]; +"3198 6242" -> "3345 6518" [label="[]", style=solid]; +"3198 6242" -> "3573 6249" [label="[]", style=solid]; +"3199 6197" -> "3200 6198" [label="[-1]", style=dashed]; +"3200 6198" -> "3345 6518" [label="[]", style=solid]; +"3200 6198" -> "3577 6205" [label="[]", style=solid]; +"3201 6153" -> "3202 6154" [label="[-1]", style=dashed]; +"3202 6154" -> "3345 6518" [label="[]", style=solid]; +"3202 6154" -> "3581 6161" [label="[]", style=solid]; +"3203 6109" -> "3204 6110" [label="[-1]", style=dashed]; +"3204 6110" -> "3345 6518" [label="[]", style=solid]; +"3204 6110" -> "3585 6117" [label="[]", style=solid]; +"3205 6065" -> "3206 6066" [label="[-1]", style=dashed]; +"3206 6066" -> "3345 6518" [label="[]", style=solid]; +"3206 6066" -> "3589 6073" [label="[]", style=solid]; +"3207 6021" -> "3208 6022" [label="[-1]", style=dashed]; +"3208 6022" -> "3345 6518" [label="[]", style=solid]; +"3208 6022" -> "3593 6029" [label="[]", style=solid]; +"3209 5977" -> "3210 5978" [label="[-1]", style=dashed]; +"3210 5978" -> "3345 6518" [label="[]", style=solid]; +"3210 5978" -> "3597 5985" [label="[]", style=solid]; +"3211 5933" -> "3212 5934" [label="[-1]", style=dashed]; +"3212 5934" -> "3345 6518" [label="[]", style=solid]; +"3212 5934" -> "3601 5941" [label="[]", style=solid]; +"3213 5889" -> "3214 5890" [label="[-1]", style=dashed]; +"3214 5890" -> "3345 6518" [label="[]", style=solid]; +"3214 5890" -> "3605 5897" [label="[]", style=solid]; +"3215 5845" -> "3216 5846" [label="[-1]", style=dashed]; +"3216 5846" -> "3345 6518" [label="[]", style=solid]; +"3216 5846" -> "3609 5853" [label="[]", style=solid]; +"3217 5801" -> "3218 5802" [label="[-1]", style=dashed]; +"3218 5802" -> "3345 6518" [label="[]", style=solid]; +"3218 5802" -> "3613 5809" [label="[]", style=solid]; +"3219 5757" -> "3220 5758" [label="[-1]", style=dashed]; +"3220 5758" -> "3345 6518" [label="[]", style=solid]; +"3220 5758" -> "3617 5765" [label="[]", style=solid]; +"3221 5713" -> "3222 5714" [label="[-1]", style=dashed]; +"3222 5714" -> "3345 6518" [label="[]", style=solid]; +"3222 5714" -> "3621 5721" [label="[]", style=solid]; +"3223 5669" -> "3224 5670" [label="[-1]", style=dashed]; +"3224 5670" -> "3345 6518" [label="[]", style=solid]; +"3224 5670" -> "3625 5677" [label="[]", style=solid]; +"3225 5625" -> "3226 5626" [label="[-1]", style=dashed]; +"3226 5626" -> "3345 6518" [label="[]", style=solid]; +"3226 5626" -> "3629 5633" [label="[]", style=solid]; +"3227 5581" -> "3228 5582" [label="[-1]", style=dashed]; +"3228 5582" -> "3345 6518" [label="[]", style=solid]; +"3228 5582" -> "3633 5589" [label="[]", style=solid]; +"3229 5537" -> "3230 5538" [label="[-1]", style=dashed]; +"3230 5538" -> "3345 6518" [label="[]", style=solid]; +"3230 5538" -> "3637 5545" [label="[]", style=solid]; +"3231 5493" -> "3232 5494" [label="[-1]", style=dashed]; +"3232 5494" -> "3345 6518" [label="[]", style=solid]; +"3232 5494" -> "3641 5501" [label="[]", style=solid]; +"3233 5449" -> "3234 5450" [label="[-1]", style=dashed]; +"3234 5450" -> "3345 6518" [label="[]", style=solid]; +"3234 5450" -> "3645 5457" [label="[]", style=solid]; +"3235 5405" -> "3236 5406" [label="[-1]", style=dashed]; +"3236 
5406" -> "3345 6518" [label="[]", style=solid]; +"3236 5406" -> "3649 5413" [label="[]", style=solid]; +"3237 5361" -> "3238 5362" [label="[-1]", style=dashed]; +"3238 5362" -> "3345 6518" [label="[]", style=solid]; +"3238 5362" -> "3653 5369" [label="[]", style=solid]; +"3239 5317" -> "3240 5318" [label="[-1]", style=dashed]; +"3240 5318" -> "3345 6518" [label="[]", style=solid]; +"3240 5318" -> "3657 5325" [label="[]", style=solid]; +"3241 5273" -> "3242 5274" [label="[-1]", style=dashed]; +"3242 5274" -> "3345 6518" [label="[]", style=solid]; +"3242 5274" -> "3661 5281" [label="[]", style=solid]; +"3243 5229" -> "3244 5230" [label="[-1]", style=dashed]; +"3244 5230" -> "3345 6518" [label="[]", style=solid]; +"3244 5230" -> "3665 5237" [label="[]", style=solid]; +"3245 5185" -> "3246 5186" [label="[-1]", style=dashed]; +"3246 5186" -> "3345 6518" [label="[]", style=solid]; +"3246 5186" -> "3669 5193" [label="[]", style=solid]; +"3247 5141" -> "3248 5142" [label="[-1]", style=dashed]; +"3248 5142" -> "3345 6518" [label="[]", style=solid]; +"3248 5142" -> "3673 5149" [label="[]", style=solid]; +"3249 5097" -> "3250 5098" [label="[-1]", style=dashed]; +"3250 5098" -> "3345 6518" [label="[]", style=solid]; +"3250 5098" -> "3677 5105" [label="[]", style=solid]; +"3251 5053" -> "3252 5054" [label="[-1]", style=dashed]; +"3252 5054" -> "3345 6518" [label="[]", style=solid]; +"3252 5054" -> "3681 5061" [label="[]", style=solid]; +"3253 5009" -> "3254 5010" [label="[-1]", style=dashed]; +"3254 5010" -> "3345 6518" [label="[]", style=solid]; +"3254 5010" -> "3685 5017" [label="[]", style=solid]; +"3255 4965" -> "3256 4966" [label="[-1]", style=dashed]; +"3256 4966" -> "3345 6518" [label="[]", style=solid]; +"3256 4966" -> "3689 4973" [label="[]", style=solid]; +"3257 4921" -> "3258 4922" [label="[-1]", style=dashed]; +"3258 4922" -> "3345 6518" [label="[]", style=solid]; +"3258 4922" -> "3693 4929" [label="[]", style=solid]; +"3259 4877" -> "3260 4878" [label="[-1]", style=dashed]; +"3260 4878" -> "3345 6518" [label="[]", style=solid]; +"3260 4878" -> "3697 4885" [label="[]", style=solid]; +"3261 4833" -> "3262 4834" [label="[-1]", style=dashed]; +"3262 4834" -> "3345 6518" [label="[]", style=solid]; +"3262 4834" -> "3701 4841" [label="[]", style=solid]; +"3263 4789" -> "3264 4790" [label="[-1]", style=dashed]; +"3264 4790" -> "3345 6518" [label="[]", style=solid]; +"3264 4790" -> "3705 4797" [label="[]", style=solid]; +"3265 4745" -> "3266 4746" [label="[-1]", style=dashed]; +"3266 4746" -> "3345 6518" [label="[]", style=solid]; +"3266 4746" -> "3709 4753" [label="[]", style=solid]; +"3267 4701" -> "3268 4702" [label="[-1]", style=dashed]; +"3268 4702" -> "3345 6518" [label="[]", style=solid]; +"3268 4702" -> "3713 4709" [label="[]", style=solid]; +"3269 4657" -> "3270 4658" [label="[-1]", style=dashed]; +"3270 4658" -> "3345 6518" [label="[]", style=solid]; +"3270 4658" -> "3717 4665" [label="[]", style=solid]; +"3271 4613" -> "3272 4614" [label="[-1]", style=dashed]; +"3272 4614" -> "3345 6518" [label="[]", style=solid]; +"3272 4614" -> "3721 4621" [label="[]", style=solid]; +"3273 4569" -> "3274 4570" [label="[-1]", style=dashed]; +"3274 4570" -> "3345 6518" [label="[]", style=solid]; +"3274 4570" -> "3725 4577" [label="[]", style=solid]; +"3275 4525" -> "3276 4526" [label="[-1]", style=dashed]; +"3276 4526" -> "3345 6518" [label="[]", style=solid]; +"3276 4526" -> "3729 4533" [label="[]", style=solid]; +"3277 4481" -> "3278 4482" [label="[-1]", style=dashed]; +"3278 4482" -> "3345 6518" 
[label="[]", style=solid]; +"3278 4482" -> "3733 4489" [label="[]", style=solid]; +"3279 4437" -> "3280 4438" [label="[-1]", style=dashed]; +"3280 4438" -> "3345 6518" [label="[]", style=solid]; +"3280 4438" -> "3737 4445" [label="[]", style=solid]; +"3281 4393" -> "3282 4394" [label="[-1]", style=dashed]; +"3282 4394" -> "3345 6518" [label="[]", style=solid]; +"3282 4394" -> "3741 4401" [label="[]", style=solid]; +"3283 4349" -> "3284 4350" [label="[-1]", style=dashed]; +"3284 4350" -> "3345 6518" [label="[]", style=solid]; +"3284 4350" -> "3745 4357" [label="[]", style=solid]; +"3285 4305" -> "3286 4306" [label="[-1]", style=dashed]; +"3286 4306" -> "3345 6518" [label="[]", style=solid]; +"3286 4306" -> "3749 4313" [label="[]", style=solid]; +"3287 4261" -> "3288 4262" [label="[-1]", style=dashed]; +"3288 4262" -> "3345 6518" [label="[]", style=solid]; +"3288 4262" -> "3753 4269" [label="[]", style=solid]; +"3289 4217" -> "3290 4218" [label="[-1]", style=dashed]; +"3290 4218" -> "3345 6518" [label="[]", style=solid]; +"3290 4218" -> "3757 4225" [label="[]", style=solid]; +"3291 4173" -> "3292 4174" [label="[-1]", style=dashed]; +"3292 4174" -> "3345 6518" [label="[]", style=solid]; +"3292 4174" -> "3761 4181" [label="[]", style=solid]; +"3293 4129" -> "3294 4130" [label="[-1]", style=dashed]; +"3294 4130" -> "3345 6518" [label="[]", style=solid]; +"3294 4130" -> "3765 4137" [label="[]", style=solid]; +"3295 4085" -> "3296 4086" [label="[-1]", style=dashed]; +"3296 4086" -> "3345 6518" [label="[]", style=solid]; +"3296 4086" -> "3769 4093" [label="[]", style=solid]; +"3297 4041" -> "3298 4042" [label="[-1]", style=dashed]; +"3298 4042" -> "3345 6518" [label="[]", style=solid]; +"3298 4042" -> "3773 4049" [label="[]", style=solid]; +"3299 3997" -> "3300 3998" [label="[-1]", style=dashed]; +"3300 3998" -> "3345 6518" [label="[]", style=solid]; +"3300 3998" -> "3777 4005" [label="[]", style=solid]; +"3301 3953" -> "3302 3954" [label="[-1]", style=dashed]; +"3302 3954" -> "3345 6518" [label="[]", style=solid]; +"3302 3954" -> "3781 3961" [label="[]", style=solid]; +"3303 3909" -> "3304 3910" [label="[-1]", style=dashed]; +"3304 3910" -> "3345 6518" [label="[]", style=solid]; +"3304 3910" -> "3785 3917" [label="[]", style=solid]; +"3305 3865" -> "3306 3866" [label="[-1]", style=dashed]; +"3306 3866" -> "3345 6518" [label="[]", style=solid]; +"3306 3866" -> "3789 3873" [label="[]", style=solid]; +"3307 3821" -> "3308 3822" [label="[-1]", style=dashed]; +"3308 3822" -> "3345 6518" [label="[]", style=solid]; +"3308 3822" -> "3793 3829" [label="[]", style=solid]; +"3309 3777" -> "3310 3778" [label="[-1]", style=dashed]; +"3310 3778" -> "3345 6518" [label="[]", style=solid]; +"3310 3778" -> "3797 3785" [label="[]", style=solid]; +"3311 3733" -> "3312 3734" [label="[-1]", style=dashed]; +"3312 3734" -> "3345 6518" [label="[]", style=solid]; +"3312 3734" -> "3801 3741" [label="[]", style=solid]; +"3313 3689" -> "3314 3690" [label="[-1]", style=dashed]; +"3314 3690" -> "3345 6518" [label="[]", style=solid]; +"3314 3690" -> "3805 3697" [label="[]", style=solid]; +"3315 3645" -> "3316 3646" [label="[-1]", style=dashed]; +"3316 3646" -> "3345 6518" [label="[]", style=solid]; +"3316 3646" -> "3809 3653" [label="[]", style=solid]; +"3317 3601" -> "3318 3602" [label="[-1]", style=dashed]; +"3318 3602" -> "3345 6518" [label="[]", style=solid]; +"3318 3602" -> "3813 3609" [label="[]", style=solid]; +"3319 3557" -> "3320 3558" [label="[-1]", style=dashed]; +"3320 3558" -> "3345 6518" [label="[]", style=solid]; 
+"3320 3558" -> "3817 3565" [label="[]", style=solid]; +"3321 3513" -> "3322 3514" [label="[-1]", style=dashed]; +"3322 3514" -> "3345 6518" [label="[]", style=solid]; +"3322 3514" -> "3821 3521" [label="[]", style=solid]; +"3323 3469" -> "3324 3470" [label="[-1]", style=dashed]; +"3324 3470" -> "3345 6518" [label="[]", style=solid]; +"3324 3470" -> "3825 3477" [label="[]", style=solid]; +"3325 3425" -> "3326 3426" [label="[-1]", style=dashed]; +"3326 3426" -> "3345 6518" [label="[]", style=solid]; +"3326 3426" -> "3829 3433" [label="[]", style=solid]; +"3327 3381" -> "3328 3382" [label="[-1]", style=dashed]; +"3328 3382" -> "3345 6518" [label="[]", style=solid]; +"3328 3382" -> "3833 3389" [label="[]", style=solid]; +"3329 3337" -> "3330 3338" [label="[-1]", style=dashed]; +"3330 3338" -> "3345 6518" [label="[]", style=solid]; +"3330 3338" -> "3837 3345" [label="[]", style=solid]; +"3331 3293" -> "3332 3294" [label="[-1]", style=dashed]; +"3332 3294" -> "3345 6518" [label="[]", style=solid]; +"3332 3294" -> "3841 3301" [label="[]", style=solid]; +"3333 3249" -> "3334 3250" [label="[-1]", style=dashed]; +"3334 3250" -> "3345 6518" [label="[]", style=solid]; +"3334 3250" -> "3845 3257" [label="[]", style=solid]; +"3335 3205" -> "3336 3206" [label="[-1]", style=dashed]; +"3336 3206" -> "3345 6518" [label="[]", style=solid]; +"3336 3206" -> "3849 3213" [label="[]", style=solid]; +"3337 3161" -> "3338 3162" [label="[-1]", style=dashed]; +"3338 3162" -> "3345 6518" [label="[]", style=solid]; +"3338 3162" -> "3853 3169" [label="[]", style=solid]; +"3339 3117" -> "3340 3118" [label="[-1]", style=dashed]; +"3340 3118" -> "3345 6518" [label="[]", style=solid]; +"3340 3118" -> "3857 3125" [label="[]", style=solid]; +"3341 3073" -> "3342 3074" [label="[-1]", style=dashed]; +"3342 3074" -> "3345 6518" [label="[]", style=solid]; +"3342 3074" -> "3861 3081" [label="[]", style=solid]; +"3343 3029" -> "3344 3030" [label="[-1]", style=dashed]; +"3344 3030" -> "3345 6518" [label="[]", style=solid]; +"3344 3030" -> "3865 3037" [label="[]", style=solid]; +"3345 6518" -> "3346 6530" [label="[]", style=solid]; +"3346 6530" -> "3347 6576" [label="[-1, 4]", style=solid]; +"3346 6530" -> "3349 6569" [label="[-1, 4]", style=solid]; +"3346 6530" -> "3353 6559" [label="[-1, 4]", style=solid]; +"3346 6530" -> "3355 6552" [label="[-1, 4]", style=solid]; +"3346 6530" -> "3376 6539" [label="[-1, 4]", style=solid]; +"3346 6530" -> "3380 6547" [label="[-1, 4]", style=solid]; +"3346 6530" -> "3887 nncf_model_output_0" [label="[-1, 4]", style=solid]; +"3347 6576" -> "3348 6578" [label="[-1, 4]", style=solid]; +"3348 6578" -> "3351 6579" [label="[-1]", style=solid]; +"3349 6569" -> "3350 6571" [label="[-1, 4]", style=solid]; +"3350 6571" -> "3351 6579" [label="[-1]", style=solid]; +"3351 6579" -> "3352 6581" [label="[-1]", style=solid]; +"3352 6581" -> "3359 6582" [label="[-1]", style=solid]; +"3353 6559" -> "3354 6561" [label="[-1, 4]", style=solid]; +"3354 6561" -> "3357 6562" [label="[-1]", style=solid]; +"3355 6552" -> "3356 6554" [label="[-1, 4]", style=solid]; +"3356 6554" -> "3357 6562" [label="[-1]", style=solid]; +"3357 6562" -> "3358 6564" [label="[-1]", style=solid]; +"3358 6564" -> "3359 6582" [label="[-1]", style=solid]; +"3359 6582" -> "3360 6583" [label="[-1]", style=solid]; +"3360 6583" -> "3361 6586" [label="[-1]", style=solid]; +"3361 6586" -> "3362 6587" [label="[-1]", style=solid]; +"3362 6587" -> "3363 6588" [label="[-1]", style=solid]; +"3363 6588" -> "3364 6590" [label="[-1]", style=solid]; +"3364 6590" 
-> "3365 6592" [label="[-1]", style=solid]; +"3365 6592" -> "3366 6593" [label="[-1]", style=solid]; +"3366 6593" -> "3367 6594" [label="[-1]", style=solid]; +"3367 6594" -> "3368 6595" [label="[-1]", style=solid]; +"3368 6595" -> "3369 6597" [label="[-1]", style=dashed]; +"3369 6597" -> "3370 6599" [label="[-1]", style=dashed]; +"3369 6597" -> "3397 6685" [label="[-1]", style=dashed]; +"3369 6597" -> "3417 6667" [label="[-1]", style=dashed]; +"3369 6597" -> "3423 6616" [label="[-1]", style=dashed]; +"3369 6597" -> "3445 6713" [label="[-1]", style=dashed]; +"3369 6597" -> "3457 6633" [label="[-1]", style=dashed]; +"3369 6597" -> "3479 6741" [label="[-1]", style=dashed]; +"3369 6597" -> "3491 6650" [label="[-1]", style=dashed]; +"3369 6597" -> "3513 6769" [label="[-1]", style=dashed]; +"3370 6599" -> "3371 6601" [label="[-1]", style=dashed]; +"3371 6601" -> "3372 6602" [label="[-1]", style=solid]; +"3372 6602" -> "3373 6603" [label="[1, -1]", style=dashed]; +"3373 6603" -> "3374 6604" [label="[-1, 1]", style=dashed]; +"3374 6604" -> "3375 6605" [label="[-1]", style=dashed]; +"3375 6605" -> "3381 6606" [label="[-1]", style=dashed]; +"3376 6539" -> "3377 6544" [label="[-1, 4]", style=solid]; +"3377 6544" -> "3378 6545" [label="[-1, 1]", style=solid]; "3378 6545" -> "3379 6546" [label="[2]", style=dashed]; -"3379 6546" -> "3380 6547" [label="[1, 1]", style=solid]; -"3380 6547" -> "3381 6606" [label="[1, 1]", style=solid]; -"3380 6547" -> "3429 6623" [label="[1, 1]", style=solid]; -"3380 6547" -> "3463 6640" [label="[1, 1]", style=solid]; -"3380 6547" -> "3497 6657" [label="[1, 1]", style=solid]; -"3381 6606" -> "3382 6612" [label="[1, 1]", style=solid]; -"3381 6606" -> "3383 6608" [label="[1, 1]", style=solid]; -"3382 6612" -> "3386 6613" [label="[1, 4]", style=solid]; -"3383 6608" -> "3384 6609" [label="[1, 1]", style=solid]; -"3384 6609" -> "3385 6610" [label="[1]", style=solid]; -"3385 6610" -> "3386 6613" [label="[1]", style=dashed]; -"3386 6613" -> "3387 6614" [label="[1, 256, 14, 14]", style=solid]; -"3387 6614" -> "3388 6702" [label="[1, 256, 14, 14]", style=solid]; -"3387 6614" -> "3391 6699" [label="[1, 256, 14, 14]", style=solid]; -"3387 6614" -> "3394 6696" [label="[1, 256, 14, 14]", style=solid]; -"3387 6614" -> "3408 6676" [label="[1, 256, 14, 14]", style=solid]; -"3387 6614" -> "3411 6673" [label="[1, 256, 14, 14]", style=solid]; -"3387 6614" -> "3414 6670" [label="[1, 256, 14, 14]", style=solid]; -"3387 6614" -> "3422 6711" [label="[1, 256, 14, 14]", style=solid]; +"3379 6546" -> "3380 6547" [label="[-1, -1]", style=solid]; +"3380 6547" -> "3381 6606" [label="[-1, -1]", style=solid]; +"3380 6547" -> "3429 6623" [label="[-1, -1]", style=solid]; +"3380 6547" -> "3463 6640" [label="[-1, -1]", style=solid]; +"3380 6547" -> "3497 6657" [label="[-1, -1]", style=solid]; +"3381 6606" -> "3382 6612" [label="[-1, -1]", style=solid]; +"3381 6606" -> "3383 6608" [label="[-1, -1]", style=solid]; +"3382 6612" -> "3386 6613" [label="[-1, 4]", style=solid]; +"3383 6608" -> "3384 6609" [label="[-1, 1]", style=solid]; +"3384 6609" -> "3385 6610" [label="[-1]", style=solid]; +"3385 6610" -> "3386 6613" [label="[-1]", style=dashed]; +"3386 6613" -> "3387 6614" [label="[-1, 256, 14, 14]", style=solid]; +"3387 6614" -> "3388 6702" [label="[-1, 256, 14, 14]", style=solid]; +"3387 6614" -> "3391 6699" [label="[-1, 256, 14, 14]", style=solid]; +"3387 6614" -> "3394 6696" [label="[-1, 256, 14, 14]", style=solid]; +"3387 6614" -> "3408 6676" [label="[-1, 256, 14, 14]", style=solid]; +"3387 6614" -> "3411 
6673" [label="[-1, 256, 14, 14]", style=solid]; +"3387 6614" -> "3414 6670" [label="[-1, 256, 14, 14]", style=solid]; +"3387 6614" -> "3422 6711" [label="[-1, 256, 14, 14]", style=solid]; "3388 6702" -> "3389 6703" [label="[4]", style=dashed]; "3389 6703" -> "3390 6707" [label="[]", style=dashed]; "3390 6707" -> "3405 6708" [label="[1]", style=dashed]; @@ -8311,18 +8311,18 @@ strict digraph { "3394 6696" -> "3395 6697" [label="[4]", style=dashed]; "3395 6697" -> "3396 6705" [label="[]", style=dashed]; "3396 6705" -> "3405 6708" [label="[1]", style=dashed]; -"3397 6685" -> "3398 6687" [label="[1]", style=dashed]; -"3398 6687" -> "3399 6688" [label="[1]", style=solid]; -"3399 6688" -> "3400 6689" [label="[1, 1]", style=dashed]; -"3400 6689" -> "3401 6691" [label="[1, 1]", style=dashed]; -"3401 6691" -> "3402 6693" [label="[1, 1, 1, 1]", style=dashed]; -"3401 6691" -> "3406 6709" [label="[1, 1, 1, 1]", style=dashed]; +"3397 6685" -> "3398 6687" [label="[-1]", style=dashed]; +"3398 6687" -> "3399 6688" [label="[-1]", style=solid]; +"3399 6688" -> "3400 6689" [label="[1, -1]", style=dashed]; +"3400 6689" -> "3401 6691" [label="[-1, 1]", style=dashed]; +"3401 6691" -> "3402 6693" [label="[-1, 1, 1, 1]", style=dashed]; +"3401 6691" -> "3406 6709" [label="[-1, 1, 1, 1]", style=dashed]; "3402 6693" -> "3403 6694" [label="[4]", style=dashed]; "3403 6694" -> "3404 6704" [label="[]", style=dashed]; "3404 6704" -> "3405 6708" [label="[1]", style=dashed]; "3405 6708" -> "3406 6709" [label="[4]", style=dashed]; -"3406 6709" -> "3407 6710" [label="[1, 1, 1, 1]", style=dashed]; -"3407 6710" -> "3422 6711" [label="[1, 1, 1, 1]", style=dashed]; +"3406 6709" -> "3407 6710" [label="[-1, -1, -1, -1]", style=dashed]; +"3407 6710" -> "3422 6711" [label="[-1, -1, -1, -1]", style=dashed]; "3408 6676" -> "3409 6677" [label="[4]", style=dashed]; "3409 6677" -> "3410 6681" [label="[]", style=dashed]; "3410 6681" -> "3420 6682" [label="[1]", style=dashed]; @@ -8336,25 +8336,25 @@ strict digraph { "3418 6668" -> "3419 6678" [label="[]", style=dashed]; "3419 6678" -> "3420 6682" [label="[1]", style=dashed]; "3420 6682" -> "3421 6683" [label="[4]", style=dashed]; -"3421 6683" -> "3422 6711" [label="[1, 1, 1, 1]", style=solid]; -"3422 6711" -> "3456 6739" [label="[1, 1, 1, 1]", style=solid]; -"3423 6616" -> "3424 6618" [label="[1]", style=dashed]; -"3424 6618" -> "3425 6619" [label="[1]", style=solid]; -"3425 6619" -> "3426 6620" [label="[1, 1]", style=dashed]; -"3426 6620" -> "3427 6621" [label="[1, 1]", style=dashed]; -"3427 6621" -> "3428 6622" [label="[1]", style=dashed]; -"3428 6622" -> "3429 6623" [label="[1]", style=dashed]; -"3429 6623" -> "3430 6629" [label="[1, 1]", style=solid]; -"3429 6623" -> "3431 6625" [label="[1, 1]", style=solid]; -"3430 6629" -> "3434 6630" [label="[1, 4]", style=solid]; -"3431 6625" -> "3432 6626" [label="[1, 1]", style=solid]; -"3432 6626" -> "3433 6627" [label="[1]", style=solid]; -"3433 6627" -> "3434 6630" [label="[1]", style=dashed]; -"3434 6630" -> "3435 6631" [label="[1, 256, 14, 14]", style=solid]; -"3435 6631" -> "3436 6730" [label="[1, 256, 14, 14]", style=solid]; -"3435 6631" -> "3439 6727" [label="[1, 256, 14, 14]", style=solid]; -"3435 6631" -> "3442 6724" [label="[1, 256, 14, 14]", style=solid]; -"3435 6631" -> "3456 6739" [label="[1, 256, 14, 14]", style=solid]; +"3421 6683" -> "3422 6711" [label="[-1, -1, -1, -1]", style=solid]; +"3422 6711" -> "3456 6739" [label="[-1, -1, -1, -1]", style=solid]; +"3423 6616" -> "3424 6618" [label="[-1]", style=dashed]; +"3424 6618" -> 
"3425 6619" [label="[-1]", style=solid]; +"3425 6619" -> "3426 6620" [label="[1, -1]", style=dashed]; +"3426 6620" -> "3427 6621" [label="[-1, 1]", style=dashed]; +"3427 6621" -> "3428 6622" [label="[-1]", style=dashed]; +"3428 6622" -> "3429 6623" [label="[-1]", style=dashed]; +"3429 6623" -> "3430 6629" [label="[-1, -1]", style=solid]; +"3429 6623" -> "3431 6625" [label="[-1, -1]", style=solid]; +"3430 6629" -> "3434 6630" [label="[-1, 4]", style=solid]; +"3431 6625" -> "3432 6626" [label="[-1, 1]", style=solid]; +"3432 6626" -> "3433 6627" [label="[-1]", style=solid]; +"3433 6627" -> "3434 6630" [label="[-1]", style=dashed]; +"3434 6630" -> "3435 6631" [label="[-1, 256, 14, 14]", style=solid]; +"3435 6631" -> "3436 6730" [label="[-1, 256, 14, 14]", style=solid]; +"3435 6631" -> "3439 6727" [label="[-1, 256, 14, 14]", style=solid]; +"3435 6631" -> "3442 6724" [label="[-1, 256, 14, 14]", style=solid]; +"3435 6631" -> "3456 6739" [label="[-1, 256, 14, 14]", style=solid]; "3436 6730" -> "3437 6731" [label="[4]", style=dashed]; "3437 6731" -> "3438 6735" [label="[]", style=dashed]; "3438 6735" -> "3453 6736" [label="[1]", style=dashed]; @@ -8364,36 +8364,36 @@ strict digraph { "3442 6724" -> "3443 6725" [label="[4]", style=dashed]; "3443 6725" -> "3444 6733" [label="[]", style=dashed]; "3444 6733" -> "3453 6736" [label="[1]", style=dashed]; -"3445 6713" -> "3446 6715" [label="[1]", style=dashed]; -"3446 6715" -> "3447 6716" [label="[1]", style=solid]; -"3447 6716" -> "3448 6717" [label="[1, 1]", style=dashed]; -"3448 6717" -> "3449 6719" [label="[1, 1]", style=dashed]; -"3449 6719" -> "3450 6721" [label="[1, 1, 1, 1]", style=dashed]; -"3449 6719" -> "3454 6737" [label="[1, 1, 1, 1]", style=dashed]; +"3445 6713" -> "3446 6715" [label="[-1]", style=dashed]; +"3446 6715" -> "3447 6716" [label="[-1]", style=solid]; +"3447 6716" -> "3448 6717" [label="[1, -1]", style=dashed]; +"3448 6717" -> "3449 6719" [label="[-1, 1]", style=dashed]; +"3449 6719" -> "3450 6721" [label="[-1, 1, 1, 1]", style=dashed]; +"3449 6719" -> "3454 6737" [label="[-1, 1, 1, 1]", style=dashed]; "3450 6721" -> "3451 6722" [label="[4]", style=dashed]; "3451 6722" -> "3452 6732" [label="[]", style=dashed]; "3452 6732" -> "3453 6736" [label="[1]", style=dashed]; "3453 6736" -> "3454 6737" [label="[4]", style=dashed]; -"3454 6737" -> "3455 6738" [label="[1, 1, 1, 1]", style=dashed]; -"3455 6738" -> "3456 6739" [label="[1, 1, 1, 1]", style=dashed]; -"3456 6739" -> "3490 6767" [label="[1, 1, 1, 1]", style=solid]; -"3457 6633" -> "3458 6635" [label="[1]", style=dashed]; -"3458 6635" -> "3459 6636" [label="[1]", style=solid]; -"3459 6636" -> "3460 6637" [label="[1, 1]", style=dashed]; -"3460 6637" -> "3461 6638" [label="[1, 1]", style=dashed]; -"3461 6638" -> "3462 6639" [label="[1]", style=dashed]; -"3462 6639" -> "3463 6640" [label="[1]", style=dashed]; -"3463 6640" -> "3464 6646" [label="[1, 1]", style=solid]; -"3463 6640" -> "3465 6642" [label="[1, 1]", style=solid]; -"3464 6646" -> "3468 6647" [label="[1, 4]", style=solid]; -"3465 6642" -> "3466 6643" [label="[1, 1]", style=solid]; -"3466 6643" -> "3467 6644" [label="[1]", style=solid]; -"3467 6644" -> "3468 6647" [label="[1]", style=dashed]; -"3468 6647" -> "3469 6648" [label="[1, 256, 14, 14]", style=solid]; -"3469 6648" -> "3470 6758" [label="[1, 256, 14, 14]", style=solid]; -"3469 6648" -> "3473 6755" [label="[1, 256, 14, 14]", style=solid]; -"3469 6648" -> "3476 6752" [label="[1, 256, 14, 14]", style=solid]; -"3469 6648" -> "3490 6767" [label="[1, 256, 14, 14]", 
style=solid]; +"3454 6737" -> "3455 6738" [label="[-1, -1, -1, -1]", style=dashed]; +"3455 6738" -> "3456 6739" [label="[-1, -1, -1, -1]", style=dashed]; +"3456 6739" -> "3490 6767" [label="[-1, -1, -1, -1]", style=solid]; +"3457 6633" -> "3458 6635" [label="[-1]", style=dashed]; +"3458 6635" -> "3459 6636" [label="[-1]", style=solid]; +"3459 6636" -> "3460 6637" [label="[1, -1]", style=dashed]; +"3460 6637" -> "3461 6638" [label="[-1, 1]", style=dashed]; +"3461 6638" -> "3462 6639" [label="[-1]", style=dashed]; +"3462 6639" -> "3463 6640" [label="[-1]", style=dashed]; +"3463 6640" -> "3464 6646" [label="[-1, -1]", style=solid]; +"3463 6640" -> "3465 6642" [label="[-1, -1]", style=solid]; +"3464 6646" -> "3468 6647" [label="[-1, 4]", style=solid]; +"3465 6642" -> "3466 6643" [label="[-1, 1]", style=solid]; +"3466 6643" -> "3467 6644" [label="[-1]", style=solid]; +"3467 6644" -> "3468 6647" [label="[-1]", style=dashed]; +"3468 6647" -> "3469 6648" [label="[-1, 256, 14, 14]", style=solid]; +"3469 6648" -> "3470 6758" [label="[-1, 256, 14, 14]", style=solid]; +"3469 6648" -> "3473 6755" [label="[-1, 256, 14, 14]", style=solid]; +"3469 6648" -> "3476 6752" [label="[-1, 256, 14, 14]", style=solid]; +"3469 6648" -> "3490 6767" [label="[-1, 256, 14, 14]", style=solid]; "3470 6758" -> "3471 6759" [label="[4]", style=dashed]; "3471 6759" -> "3472 6763" [label="[]", style=dashed]; "3472 6763" -> "3487 6764" [label="[1]", style=dashed]; @@ -8403,36 +8403,36 @@ strict digraph { "3476 6752" -> "3477 6753" [label="[4]", style=dashed]; "3477 6753" -> "3478 6761" [label="[]", style=dashed]; "3478 6761" -> "3487 6764" [label="[1]", style=dashed]; -"3479 6741" -> "3480 6743" [label="[1]", style=dashed]; -"3480 6743" -> "3481 6744" [label="[1]", style=solid]; -"3481 6744" -> "3482 6745" [label="[1, 1]", style=dashed]; -"3482 6745" -> "3483 6747" [label="[1, 1]", style=dashed]; -"3483 6747" -> "3484 6749" [label="[1, 1, 1, 1]", style=dashed]; -"3483 6747" -> "3488 6765" [label="[1, 1, 1, 1]", style=dashed]; +"3479 6741" -> "3480 6743" [label="[-1]", style=dashed]; +"3480 6743" -> "3481 6744" [label="[-1]", style=solid]; +"3481 6744" -> "3482 6745" [label="[1, -1]", style=dashed]; +"3482 6745" -> "3483 6747" [label="[-1, 1]", style=dashed]; +"3483 6747" -> "3484 6749" [label="[-1, 1, 1, 1]", style=dashed]; +"3483 6747" -> "3488 6765" [label="[-1, 1, 1, 1]", style=dashed]; "3484 6749" -> "3485 6750" [label="[4]", style=dashed]; "3485 6750" -> "3486 6760" [label="[]", style=dashed]; "3486 6760" -> "3487 6764" [label="[1]", style=dashed]; "3487 6764" -> "3488 6765" [label="[4]", style=dashed]; -"3488 6765" -> "3489 6766" [label="[1, 1, 1, 1]", style=dashed]; -"3489 6766" -> "3490 6767" [label="[1, 1, 1, 1]", style=dashed]; -"3490 6767" -> "3524 6795" [label="[1, 1, 1, 1]", style=solid]; -"3491 6650" -> "3492 6652" [label="[1]", style=dashed]; -"3492 6652" -> "3493 6653" [label="[1]", style=solid]; -"3493 6653" -> "3494 6654" [label="[1, 1]", style=dashed]; -"3494 6654" -> "3495 6655" [label="[1, 1]", style=dashed]; -"3495 6655" -> "3496 6656" [label="[1]", style=dashed]; -"3496 6656" -> "3497 6657" [label="[1]", style=dashed]; -"3497 6657" -> "3498 6663" [label="[1, 1]", style=solid]; -"3497 6657" -> "3499 6659" [label="[1, 1]", style=solid]; -"3498 6663" -> "3502 6664" [label="[1, 4]", style=solid]; -"3499 6659" -> "3500 6660" [label="[1, 1]", style=solid]; -"3500 6660" -> "3501 6661" [label="[1]", style=solid]; -"3501 6661" -> "3502 6664" [label="[1]", style=dashed]; -"3502 6664" -> "3503 6665" [label="[1, 256, 
14, 14]", style=solid]; -"3503 6665" -> "3504 6786" [label="[1, 256, 14, 14]", style=solid]; -"3503 6665" -> "3507 6783" [label="[1, 256, 14, 14]", style=solid]; -"3503 6665" -> "3510 6780" [label="[1, 256, 14, 14]", style=solid]; -"3503 6665" -> "3524 6795" [label="[1, 256, 14, 14]", style=solid]; +"3488 6765" -> "3489 6766" [label="[-1, -1, -1, -1]", style=dashed]; +"3489 6766" -> "3490 6767" [label="[-1, -1, -1, -1]", style=dashed]; +"3490 6767" -> "3524 6795" [label="[-1, -1, -1, -1]", style=solid]; +"3491 6650" -> "3492 6652" [label="[-1]", style=dashed]; +"3492 6652" -> "3493 6653" [label="[-1]", style=solid]; +"3493 6653" -> "3494 6654" [label="[1, -1]", style=dashed]; +"3494 6654" -> "3495 6655" [label="[-1, 1]", style=dashed]; +"3495 6655" -> "3496 6656" [label="[-1]", style=dashed]; +"3496 6656" -> "3497 6657" [label="[-1]", style=dashed]; +"3497 6657" -> "3498 6663" [label="[-1, -1]", style=solid]; +"3497 6657" -> "3499 6659" [label="[-1, -1]", style=solid]; +"3498 6663" -> "3502 6664" [label="[-1, 4]", style=solid]; +"3499 6659" -> "3500 6660" [label="[-1, 1]", style=solid]; +"3500 6660" -> "3501 6661" [label="[-1]", style=solid]; +"3501 6661" -> "3502 6664" [label="[-1]", style=dashed]; +"3502 6664" -> "3503 6665" [label="[-1, 256, 14, 14]", style=solid]; +"3503 6665" -> "3504 6786" [label="[-1, 256, 14, 14]", style=solid]; +"3503 6665" -> "3507 6783" [label="[-1, 256, 14, 14]", style=solid]; +"3503 6665" -> "3510 6780" [label="[-1, 256, 14, 14]", style=solid]; +"3503 6665" -> "3524 6795" [label="[-1, 256, 14, 14]", style=solid]; "3504 6786" -> "3505 6787" [label="[4]", style=dashed]; "3505 6787" -> "3506 6791" [label="[]", style=dashed]; "3506 6791" -> "3521 6792" [label="[1]", style=dashed]; @@ -8442,372 +8442,372 @@ strict digraph { "3510 6780" -> "3511 6781" [label="[4]", style=dashed]; "3511 6781" -> "3512 6789" [label="[]", style=dashed]; "3512 6789" -> "3521 6792" [label="[1]", style=dashed]; -"3513 6769" -> "3514 6771" [label="[1]", style=dashed]; -"3514 6771" -> "3515 6772" [label="[1]", style=solid]; -"3515 6772" -> "3516 6773" [label="[1, 1]", style=dashed]; -"3516 6773" -> "3517 6775" [label="[1, 1]", style=dashed]; -"3517 6775" -> "3518 6777" [label="[1, 1, 1, 1]", style=dashed]; -"3517 6775" -> "3522 6793" [label="[1, 1, 1, 1]", style=dashed]; +"3513 6769" -> "3514 6771" [label="[-1]", style=dashed]; +"3514 6771" -> "3515 6772" [label="[-1]", style=solid]; +"3515 6772" -> "3516 6773" [label="[1, -1]", style=dashed]; +"3516 6773" -> "3517 6775" [label="[-1, 1]", style=dashed]; +"3517 6775" -> "3518 6777" [label="[-1, 1, 1, 1]", style=dashed]; +"3517 6775" -> "3522 6793" [label="[-1, 1, 1, 1]", style=dashed]; "3518 6777" -> "3519 6778" [label="[4]", style=dashed]; "3519 6778" -> "3520 6788" [label="[]", style=dashed]; "3520 6788" -> "3521 6792" [label="[1]", style=dashed]; "3521 6792" -> "3522 6793" [label="[4]", style=dashed]; -"3522 6793" -> "3523 6794" [label="[1, 1, 1, 1]", style=dashed]; -"3523 6794" -> "3524 6795" [label="[1, 1, 1, 1]", style=dashed]; -"3524 6795" -> "3525 6798" [label="[1, 1, 1, 1]", style=solid]; -"3525 6798" -> "3526 6799" [label="[1, 256, 1, 1]", style=solid]; -"3526 6799" -> "3527 6802" [label="[1, 256, 1, 1]", style=solid]; -"3527 6802" -> "3528 6803" [label="[1, 256, 1, 1]", style=solid]; -"3528 6803" -> "3529 6806" [label="[1, 256, 1, 1]", style=solid]; -"3529 6806" -> "3530 6807" [label="[1, 256, 1, 1]", style=solid]; -"3530 6807" -> "3531 6810" [label="[1, 256, 1, 1]", style=solid]; -"3531 6810" -> "3532 6811" [label="[1, 256, 1, 
1]", style=solid]; -"3532 6811" -> "3533 6814" [label="[1, 256, 1, 1]", style=solid]; -"3533 6814" -> "3534 6815" [label="[1, 256, 1, 1]", style=solid]; -"3534 6815" -> "3535 6818" [label="[1, 256, 1, 1]", style=solid]; -"3535 6818" -> "3536 6819" [label="[1, 81, 1, 1]", style=solid]; -"3535 6818" -> "3539 6822" [label="[1, 81, 1, 1]", style=solid]; -"3536 6819" -> "3537 6844" [label="[1, 81, 1, 1]", style=solid]; -"3536 6819" -> "3873 6835" [label="[1, 81, 1, 1]", style=solid]; -"3536 6819" -> "3876 6832" [label="[1, 81, 1, 1]", style=solid]; -"3536 6819" -> "3881 6842" [label="[1, 81, 1, 1]", style=solid]; +"3522 6793" -> "3523 6794" [label="[-1, -1, -1, -1]", style=dashed]; +"3523 6794" -> "3524 6795" [label="[-1, -1, -1, -1]", style=dashed]; +"3524 6795" -> "3525 6798" [label="[-1, -1, -1, -1]", style=solid]; +"3525 6798" -> "3526 6799" [label="[-1, 256, -1, -1]", style=solid]; +"3526 6799" -> "3527 6802" [label="[-1, 256, -1, -1]", style=solid]; +"3527 6802" -> "3528 6803" [label="[-1, 256, -1, -1]", style=solid]; +"3528 6803" -> "3529 6806" [label="[-1, 256, -1, -1]", style=solid]; +"3529 6806" -> "3530 6807" [label="[-1, 256, -1, -1]", style=solid]; +"3530 6807" -> "3531 6810" [label="[-1, 256, -1, -1]", style=solid]; +"3531 6810" -> "3532 6811" [label="[-1, 256, -1, -1]", style=solid]; +"3532 6811" -> "3533 6814" [label="[-1, 256, -1, -1]", style=solid]; +"3533 6814" -> "3534 6815" [label="[-1, 256, -1, -1]", style=solid]; +"3534 6815" -> "3535 6818" [label="[-1, 256, -1, -1]", style=solid]; +"3535 6818" -> "3536 6819" [label="[-1, 81, -1, -1]", style=solid]; +"3535 6818" -> "3539 6822" [label="[-1, 81, -1, -1]", style=solid]; +"3536 6819" -> "3537 6844" [label="[-1, 81, -1, -1]", style=solid]; +"3536 6819" -> "3873 6835" [label="[-1, 81, -1, -1]", style=solid]; +"3536 6819" -> "3876 6832" [label="[-1, 81, -1, -1]", style=solid]; +"3536 6819" -> "3881 6842" [label="[-1, 81, -1, -1]", style=solid]; "3537 6844" -> "3538 6845" [label="[4]", style=dashed]; "3538 6845" -> "3548 6846" [label="[]", style=dashed]; "3539 6822" -> "3540 6823" [label="[4]", style=dashed]; "3540 6823" -> "3541 6824" [label="[]", style=dashed]; "3541 6824" -> "3542 6825" [label="[1]", style=dashed]; "3542 6825" -> "3543 6826" [label="[1]", style=dashed]; -"3543 6826" -> "3544 6827" [label="[1]", style=dashed]; -"3544 6827" -> "3545 6828" [label="[1]", style=solid]; -"3545 6828" -> "3546 6829" [label="[1, 1]", style=dashed]; -"3546 6829" -> "3547 6830" [label="[1, 1]", style=dashed]; -"3547 6830" -> "3548 6846" [label="[1]", style=dashed]; -"3548 6846" -> "3872 6847" [label="[1]", style=dashed]; -"3549 6513" -> "3550 6515" [label="[1]", style=solid]; -"3550 6515" -> "3551 6516" [label="[1]", style=solid]; -"3551 6516" -> "3552 6517" [label="[1]", style=dashed]; -"3552 6517" -> "3869 6519" [label="[1]", style=dashed]; -"3553 6469" -> "3554 6471" [label="[1]", style=solid]; -"3554 6471" -> "3555 6472" [label="[1]", style=solid]; -"3555 6472" -> "3556 6473" [label="[1]", style=dashed]; -"3556 6473" -> "3869 6519" [label="[1]", style=dashed]; -"3557 6425" -> "3558 6427" [label="[1]", style=solid]; -"3558 6427" -> "3559 6428" [label="[1]", style=solid]; -"3559 6428" -> "3560 6429" [label="[1]", style=dashed]; -"3560 6429" -> "3869 6519" [label="[1]", style=dashed]; -"3561 6381" -> "3562 6383" [label="[1]", style=solid]; -"3562 6383" -> "3563 6384" [label="[1]", style=solid]; -"3563 6384" -> "3564 6385" [label="[1]", style=dashed]; -"3564 6385" -> "3869 6519" [label="[1]", style=dashed]; -"3565 6337" -> "3566 6339" 
[label="[1]", style=solid]; -"3566 6339" -> "3567 6340" [label="[1]", style=solid]; -"3567 6340" -> "3568 6341" [label="[1]", style=dashed]; -"3568 6341" -> "3869 6519" [label="[1]", style=dashed]; -"3569 6293" -> "3570 6295" [label="[1]", style=solid]; -"3570 6295" -> "3571 6296" [label="[1]", style=solid]; -"3571 6296" -> "3572 6297" [label="[1]", style=dashed]; -"3572 6297" -> "3869 6519" [label="[1]", style=dashed]; -"3573 6249" -> "3574 6251" [label="[1]", style=solid]; -"3574 6251" -> "3575 6252" [label="[1]", style=solid]; -"3575 6252" -> "3576 6253" [label="[1]", style=dashed]; -"3576 6253" -> "3869 6519" [label="[1]", style=dashed]; -"3577 6205" -> "3578 6207" [label="[1]", style=solid]; -"3578 6207" -> "3579 6208" [label="[1]", style=solid]; -"3579 6208" -> "3580 6209" [label="[1]", style=dashed]; -"3580 6209" -> "3869 6519" [label="[1]", style=dashed]; -"3581 6161" -> "3582 6163" [label="[1]", style=solid]; -"3582 6163" -> "3583 6164" [label="[1]", style=solid]; -"3583 6164" -> "3584 6165" [label="[1]", style=dashed]; -"3584 6165" -> "3869 6519" [label="[1]", style=dashed]; -"3585 6117" -> "3586 6119" [label="[1]", style=solid]; -"3586 6119" -> "3587 6120" [label="[1]", style=solid]; -"3587 6120" -> "3588 6121" [label="[1]", style=dashed]; -"3588 6121" -> "3869 6519" [label="[1]", style=dashed]; -"3589 6073" -> "3590 6075" [label="[1]", style=solid]; -"3590 6075" -> "3591 6076" [label="[1]", style=solid]; -"3591 6076" -> "3592 6077" [label="[1]", style=dashed]; -"3592 6077" -> "3869 6519" [label="[1]", style=dashed]; -"3593 6029" -> "3594 6031" [label="[1]", style=solid]; -"3594 6031" -> "3595 6032" [label="[1]", style=solid]; -"3595 6032" -> "3596 6033" [label="[1]", style=dashed]; -"3596 6033" -> "3869 6519" [label="[1]", style=dashed]; -"3597 5985" -> "3598 5987" [label="[1]", style=solid]; -"3598 5987" -> "3599 5988" [label="[1]", style=solid]; -"3599 5988" -> "3600 5989" [label="[1]", style=dashed]; -"3600 5989" -> "3869 6519" [label="[1]", style=dashed]; -"3601 5941" -> "3602 5943" [label="[1]", style=solid]; -"3602 5943" -> "3603 5944" [label="[1]", style=solid]; -"3603 5944" -> "3604 5945" [label="[1]", style=dashed]; -"3604 5945" -> "3869 6519" [label="[1]", style=dashed]; -"3605 5897" -> "3606 5899" [label="[1]", style=solid]; -"3606 5899" -> "3607 5900" [label="[1]", style=solid]; -"3607 5900" -> "3608 5901" [label="[1]", style=dashed]; -"3608 5901" -> "3869 6519" [label="[1]", style=dashed]; -"3609 5853" -> "3610 5855" [label="[1]", style=solid]; -"3610 5855" -> "3611 5856" [label="[1]", style=solid]; -"3611 5856" -> "3612 5857" [label="[1]", style=dashed]; -"3612 5857" -> "3869 6519" [label="[1]", style=dashed]; -"3613 5809" -> "3614 5811" [label="[1]", style=solid]; -"3614 5811" -> "3615 5812" [label="[1]", style=solid]; -"3615 5812" -> "3616 5813" [label="[1]", style=dashed]; -"3616 5813" -> "3869 6519" [label="[1]", style=dashed]; -"3617 5765" -> "3618 5767" [label="[1]", style=solid]; -"3618 5767" -> "3619 5768" [label="[1]", style=solid]; -"3619 5768" -> "3620 5769" [label="[1]", style=dashed]; -"3620 5769" -> "3869 6519" [label="[1]", style=dashed]; -"3621 5721" -> "3622 5723" [label="[1]", style=solid]; -"3622 5723" -> "3623 5724" [label="[1]", style=solid]; -"3623 5724" -> "3624 5725" [label="[1]", style=dashed]; -"3624 5725" -> "3869 6519" [label="[1]", style=dashed]; -"3625 5677" -> "3626 5679" [label="[1]", style=solid]; -"3626 5679" -> "3627 5680" [label="[1]", style=solid]; -"3627 5680" -> "3628 5681" [label="[1]", style=dashed]; -"3628 5681" -> "3869 
6519" [label="[1]", style=dashed]; -"3629 5633" -> "3630 5635" [label="[1]", style=solid]; -"3630 5635" -> "3631 5636" [label="[1]", style=solid]; -"3631 5636" -> "3632 5637" [label="[1]", style=dashed]; -"3632 5637" -> "3869 6519" [label="[1]", style=dashed]; -"3633 5589" -> "3634 5591" [label="[1]", style=solid]; -"3634 5591" -> "3635 5592" [label="[1]", style=solid]; -"3635 5592" -> "3636 5593" [label="[1]", style=dashed]; -"3636 5593" -> "3869 6519" [label="[1]", style=dashed]; -"3637 5545" -> "3638 5547" [label="[1]", style=solid]; -"3638 5547" -> "3639 5548" [label="[1]", style=solid]; -"3639 5548" -> "3640 5549" [label="[1]", style=dashed]; -"3640 5549" -> "3869 6519" [label="[1]", style=dashed]; -"3641 5501" -> "3642 5503" [label="[1]", style=solid]; -"3642 5503" -> "3643 5504" [label="[1]", style=solid]; -"3643 5504" -> "3644 5505" [label="[1]", style=dashed]; -"3644 5505" -> "3869 6519" [label="[1]", style=dashed]; -"3645 5457" -> "3646 5459" [label="[1]", style=solid]; -"3646 5459" -> "3647 5460" [label="[1]", style=solid]; -"3647 5460" -> "3648 5461" [label="[1]", style=dashed]; -"3648 5461" -> "3869 6519" [label="[1]", style=dashed]; -"3649 5413" -> "3650 5415" [label="[1]", style=solid]; -"3650 5415" -> "3651 5416" [label="[1]", style=solid]; -"3651 5416" -> "3652 5417" [label="[1]", style=dashed]; -"3652 5417" -> "3869 6519" [label="[1]", style=dashed]; -"3653 5369" -> "3654 5371" [label="[1]", style=solid]; -"3654 5371" -> "3655 5372" [label="[1]", style=solid]; -"3655 5372" -> "3656 5373" [label="[1]", style=dashed]; -"3656 5373" -> "3869 6519" [label="[1]", style=dashed]; -"3657 5325" -> "3658 5327" [label="[1]", style=solid]; -"3658 5327" -> "3659 5328" [label="[1]", style=solid]; -"3659 5328" -> "3660 5329" [label="[1]", style=dashed]; -"3660 5329" -> "3869 6519" [label="[1]", style=dashed]; -"3661 5281" -> "3662 5283" [label="[1]", style=solid]; -"3662 5283" -> "3663 5284" [label="[1]", style=solid]; -"3663 5284" -> "3664 5285" [label="[1]", style=dashed]; -"3664 5285" -> "3869 6519" [label="[1]", style=dashed]; -"3665 5237" -> "3666 5239" [label="[1]", style=solid]; -"3666 5239" -> "3667 5240" [label="[1]", style=solid]; -"3667 5240" -> "3668 5241" [label="[1]", style=dashed]; -"3668 5241" -> "3869 6519" [label="[1]", style=dashed]; -"3669 5193" -> "3670 5195" [label="[1]", style=solid]; -"3670 5195" -> "3671 5196" [label="[1]", style=solid]; -"3671 5196" -> "3672 5197" [label="[1]", style=dashed]; -"3672 5197" -> "3869 6519" [label="[1]", style=dashed]; -"3673 5149" -> "3674 5151" [label="[1]", style=solid]; -"3674 5151" -> "3675 5152" [label="[1]", style=solid]; -"3675 5152" -> "3676 5153" [label="[1]", style=dashed]; -"3676 5153" -> "3869 6519" [label="[1]", style=dashed]; -"3677 5105" -> "3678 5107" [label="[1]", style=solid]; -"3678 5107" -> "3679 5108" [label="[1]", style=solid]; -"3679 5108" -> "3680 5109" [label="[1]", style=dashed]; -"3680 5109" -> "3869 6519" [label="[1]", style=dashed]; -"3681 5061" -> "3682 5063" [label="[1]", style=solid]; -"3682 5063" -> "3683 5064" [label="[1]", style=solid]; -"3683 5064" -> "3684 5065" [label="[1]", style=dashed]; -"3684 5065" -> "3869 6519" [label="[1]", style=dashed]; -"3685 5017" -> "3686 5019" [label="[1]", style=solid]; -"3686 5019" -> "3687 5020" [label="[1]", style=solid]; -"3687 5020" -> "3688 5021" [label="[1]", style=dashed]; -"3688 5021" -> "3869 6519" [label="[1]", style=dashed]; -"3689 4973" -> "3690 4975" [label="[1]", style=solid]; -"3690 4975" -> "3691 4976" [label="[1]", style=solid]; -"3691 4976" -> 
"3692 4977" [label="[1]", style=dashed]; -"3692 4977" -> "3869 6519" [label="[1]", style=dashed]; -"3693 4929" -> "3694 4931" [label="[1]", style=solid]; -"3694 4931" -> "3695 4932" [label="[1]", style=solid]; -"3695 4932" -> "3696 4933" [label="[1]", style=dashed]; -"3696 4933" -> "3869 6519" [label="[1]", style=dashed]; -"3697 4885" -> "3698 4887" [label="[1]", style=solid]; -"3698 4887" -> "3699 4888" [label="[1]", style=solid]; -"3699 4888" -> "3700 4889" [label="[1]", style=dashed]; -"3700 4889" -> "3869 6519" [label="[1]", style=dashed]; -"3701 4841" -> "3702 4843" [label="[1]", style=solid]; -"3702 4843" -> "3703 4844" [label="[1]", style=solid]; -"3703 4844" -> "3704 4845" [label="[1]", style=dashed]; -"3704 4845" -> "3869 6519" [label="[1]", style=dashed]; -"3705 4797" -> "3706 4799" [label="[1]", style=solid]; -"3706 4799" -> "3707 4800" [label="[1]", style=solid]; -"3707 4800" -> "3708 4801" [label="[1]", style=dashed]; -"3708 4801" -> "3869 6519" [label="[1]", style=dashed]; -"3709 4753" -> "3710 4755" [label="[1]", style=solid]; -"3710 4755" -> "3711 4756" [label="[1]", style=solid]; -"3711 4756" -> "3712 4757" [label="[1]", style=dashed]; -"3712 4757" -> "3869 6519" [label="[1]", style=dashed]; -"3713 4709" -> "3714 4711" [label="[1]", style=solid]; -"3714 4711" -> "3715 4712" [label="[1]", style=solid]; -"3715 4712" -> "3716 4713" [label="[1]", style=dashed]; -"3716 4713" -> "3869 6519" [label="[1]", style=dashed]; -"3717 4665" -> "3718 4667" [label="[1]", style=solid]; -"3718 4667" -> "3719 4668" [label="[1]", style=solid]; -"3719 4668" -> "3720 4669" [label="[1]", style=dashed]; -"3720 4669" -> "3869 6519" [label="[1]", style=dashed]; -"3721 4621" -> "3722 4623" [label="[1]", style=solid]; -"3722 4623" -> "3723 4624" [label="[1]", style=solid]; -"3723 4624" -> "3724 4625" [label="[1]", style=dashed]; -"3724 4625" -> "3869 6519" [label="[1]", style=dashed]; -"3725 4577" -> "3726 4579" [label="[1]", style=solid]; -"3726 4579" -> "3727 4580" [label="[1]", style=solid]; -"3727 4580" -> "3728 4581" [label="[1]", style=dashed]; -"3728 4581" -> "3869 6519" [label="[1]", style=dashed]; -"3729 4533" -> "3730 4535" [label="[1]", style=solid]; -"3730 4535" -> "3731 4536" [label="[1]", style=solid]; -"3731 4536" -> "3732 4537" [label="[1]", style=dashed]; -"3732 4537" -> "3869 6519" [label="[1]", style=dashed]; -"3733 4489" -> "3734 4491" [label="[1]", style=solid]; -"3734 4491" -> "3735 4492" [label="[1]", style=solid]; -"3735 4492" -> "3736 4493" [label="[1]", style=dashed]; -"3736 4493" -> "3869 6519" [label="[1]", style=dashed]; -"3737 4445" -> "3738 4447" [label="[1]", style=solid]; -"3738 4447" -> "3739 4448" [label="[1]", style=solid]; -"3739 4448" -> "3740 4449" [label="[1]", style=dashed]; -"3740 4449" -> "3869 6519" [label="[1]", style=dashed]; -"3741 4401" -> "3742 4403" [label="[1]", style=solid]; -"3742 4403" -> "3743 4404" [label="[1]", style=solid]; -"3743 4404" -> "3744 4405" [label="[1]", style=dashed]; -"3744 4405" -> "3869 6519" [label="[1]", style=dashed]; -"3745 4357" -> "3746 4359" [label="[1]", style=solid]; -"3746 4359" -> "3747 4360" [label="[1]", style=solid]; -"3747 4360" -> "3748 4361" [label="[1]", style=dashed]; -"3748 4361" -> "3869 6519" [label="[1]", style=dashed]; -"3749 4313" -> "3750 4315" [label="[1]", style=solid]; -"3750 4315" -> "3751 4316" [label="[1]", style=solid]; -"3751 4316" -> "3752 4317" [label="[1]", style=dashed]; -"3752 4317" -> "3869 6519" [label="[1]", style=dashed]; -"3753 4269" -> "3754 4271" [label="[1]", style=solid]; -"3754 
4271" -> "3755 4272" [label="[1]", style=solid]; -"3755 4272" -> "3756 4273" [label="[1]", style=dashed]; -"3756 4273" -> "3869 6519" [label="[1]", style=dashed]; -"3757 4225" -> "3758 4227" [label="[1]", style=solid]; -"3758 4227" -> "3759 4228" [label="[1]", style=solid]; -"3759 4228" -> "3760 4229" [label="[1]", style=dashed]; -"3760 4229" -> "3869 6519" [label="[1]", style=dashed]; -"3761 4181" -> "3762 4183" [label="[1]", style=solid]; -"3762 4183" -> "3763 4184" [label="[1]", style=solid]; -"3763 4184" -> "3764 4185" [label="[1]", style=dashed]; -"3764 4185" -> "3869 6519" [label="[1]", style=dashed]; -"3765 4137" -> "3766 4139" [label="[1]", style=solid]; -"3766 4139" -> "3767 4140" [label="[1]", style=solid]; -"3767 4140" -> "3768 4141" [label="[1]", style=dashed]; -"3768 4141" -> "3869 6519" [label="[1]", style=dashed]; -"3769 4093" -> "3770 4095" [label="[1]", style=solid]; -"3770 4095" -> "3771 4096" [label="[1]", style=solid]; -"3771 4096" -> "3772 4097" [label="[1]", style=dashed]; -"3772 4097" -> "3869 6519" [label="[1]", style=dashed]; -"3773 4049" -> "3774 4051" [label="[1]", style=solid]; -"3774 4051" -> "3775 4052" [label="[1]", style=solid]; -"3775 4052" -> "3776 4053" [label="[1]", style=dashed]; -"3776 4053" -> "3869 6519" [label="[1]", style=dashed]; -"3777 4005" -> "3778 4007" [label="[1]", style=solid]; -"3778 4007" -> "3779 4008" [label="[1]", style=solid]; -"3779 4008" -> "3780 4009" [label="[1]", style=dashed]; -"3780 4009" -> "3869 6519" [label="[1]", style=dashed]; -"3781 3961" -> "3782 3963" [label="[1]", style=solid]; -"3782 3963" -> "3783 3964" [label="[1]", style=solid]; -"3783 3964" -> "3784 3965" [label="[1]", style=dashed]; -"3784 3965" -> "3869 6519" [label="[1]", style=dashed]; -"3785 3917" -> "3786 3919" [label="[1]", style=solid]; -"3786 3919" -> "3787 3920" [label="[1]", style=solid]; -"3787 3920" -> "3788 3921" [label="[1]", style=dashed]; -"3788 3921" -> "3869 6519" [label="[1]", style=dashed]; -"3789 3873" -> "3790 3875" [label="[1]", style=solid]; -"3790 3875" -> "3791 3876" [label="[1]", style=solid]; -"3791 3876" -> "3792 3877" [label="[1]", style=dashed]; -"3792 3877" -> "3869 6519" [label="[1]", style=dashed]; -"3793 3829" -> "3794 3831" [label="[1]", style=solid]; -"3794 3831" -> "3795 3832" [label="[1]", style=solid]; -"3795 3832" -> "3796 3833" [label="[1]", style=dashed]; -"3796 3833" -> "3869 6519" [label="[1]", style=dashed]; -"3797 3785" -> "3798 3787" [label="[1]", style=solid]; -"3798 3787" -> "3799 3788" [label="[1]", style=solid]; -"3799 3788" -> "3800 3789" [label="[1]", style=dashed]; -"3800 3789" -> "3869 6519" [label="[1]", style=dashed]; -"3801 3741" -> "3802 3743" [label="[1]", style=solid]; -"3802 3743" -> "3803 3744" [label="[1]", style=solid]; -"3803 3744" -> "3804 3745" [label="[1]", style=dashed]; -"3804 3745" -> "3869 6519" [label="[1]", style=dashed]; -"3805 3697" -> "3806 3699" [label="[1]", style=solid]; -"3806 3699" -> "3807 3700" [label="[1]", style=solid]; -"3807 3700" -> "3808 3701" [label="[1]", style=dashed]; -"3808 3701" -> "3869 6519" [label="[1]", style=dashed]; -"3809 3653" -> "3810 3655" [label="[1]", style=solid]; -"3810 3655" -> "3811 3656" [label="[1]", style=solid]; -"3811 3656" -> "3812 3657" [label="[1]", style=dashed]; -"3812 3657" -> "3869 6519" [label="[1]", style=dashed]; -"3813 3609" -> "3814 3611" [label="[1]", style=solid]; -"3814 3611" -> "3815 3612" [label="[1]", style=solid]; -"3815 3612" -> "3816 3613" [label="[1]", style=dashed]; -"3816 3613" -> "3869 6519" [label="[1]", style=dashed]; 
-"3817 3565" -> "3818 3567" [label="[1]", style=solid]; -"3818 3567" -> "3819 3568" [label="[1]", style=solid]; -"3819 3568" -> "3820 3569" [label="[1]", style=dashed]; -"3820 3569" -> "3869 6519" [label="[1]", style=dashed]; -"3821 3521" -> "3822 3523" [label="[1]", style=solid]; -"3822 3523" -> "3823 3524" [label="[1]", style=solid]; -"3823 3524" -> "3824 3525" [label="[1]", style=dashed]; -"3824 3525" -> "3869 6519" [label="[1]", style=dashed]; -"3825 3477" -> "3826 3479" [label="[1]", style=solid]; -"3826 3479" -> "3827 3480" [label="[1]", style=solid]; -"3827 3480" -> "3828 3481" [label="[1]", style=dashed]; -"3828 3481" -> "3869 6519" [label="[1]", style=dashed]; -"3829 3433" -> "3830 3435" [label="[1]", style=solid]; -"3830 3435" -> "3831 3436" [label="[1]", style=solid]; -"3831 3436" -> "3832 3437" [label="[1]", style=dashed]; -"3832 3437" -> "3869 6519" [label="[1]", style=dashed]; -"3833 3389" -> "3834 3391" [label="[1]", style=solid]; -"3834 3391" -> "3835 3392" [label="[1]", style=solid]; -"3835 3392" -> "3836 3393" [label="[1]", style=dashed]; -"3836 3393" -> "3869 6519" [label="[1]", style=dashed]; -"3837 3345" -> "3838 3347" [label="[1]", style=solid]; -"3838 3347" -> "3839 3348" [label="[1]", style=solid]; -"3839 3348" -> "3840 3349" [label="[1]", style=dashed]; -"3840 3349" -> "3869 6519" [label="[1]", style=dashed]; -"3841 3301" -> "3842 3303" [label="[1]", style=solid]; -"3842 3303" -> "3843 3304" [label="[1]", style=solid]; -"3843 3304" -> "3844 3305" [label="[1]", style=dashed]; -"3844 3305" -> "3869 6519" [label="[1]", style=dashed]; -"3845 3257" -> "3846 3259" [label="[1]", style=solid]; -"3846 3259" -> "3847 3260" [label="[1]", style=solid]; -"3847 3260" -> "3848 3261" [label="[1]", style=dashed]; -"3848 3261" -> "3869 6519" [label="[1]", style=dashed]; -"3849 3213" -> "3850 3215" [label="[1]", style=solid]; -"3850 3215" -> "3851 3216" [label="[1]", style=solid]; -"3851 3216" -> "3852 3217" [label="[1]", style=dashed]; -"3852 3217" -> "3869 6519" [label="[1]", style=dashed]; -"3853 3169" -> "3854 3171" [label="[1]", style=solid]; -"3854 3171" -> "3855 3172" [label="[1]", style=solid]; -"3855 3172" -> "3856 3173" [label="[1]", style=dashed]; -"3856 3173" -> "3869 6519" [label="[1]", style=dashed]; -"3857 3125" -> "3858 3127" [label="[1]", style=solid]; -"3858 3127" -> "3859 3128" [label="[1]", style=solid]; -"3859 3128" -> "3860 3129" [label="[1]", style=dashed]; -"3860 3129" -> "3869 6519" [label="[1]", style=dashed]; -"3861 3081" -> "3862 3083" [label="[1]", style=solid]; -"3862 3083" -> "3863 3084" [label="[1]", style=solid]; -"3863 3084" -> "3864 3085" [label="[1]", style=dashed]; -"3864 3085" -> "3869 6519" [label="[1]", style=dashed]; -"3865 3037" -> "3866 3039" [label="[1]", style=solid]; -"3866 3039" -> "3867 3040" [label="[1]", style=solid]; -"3867 3040" -> "3868 3041" [label="[1]", style=dashed]; -"3868 3041" -> "3869 6519" [label="[1]", style=dashed]; -"3869 6519" -> "3870 6532" [label="[1]", style=dashed]; -"3870 6532" -> "3871 6820" [label="[1]", style=dashed]; -"3870 6532" -> "3888 nncf_model_output_1" [label="[1]", style=dashed]; -"3871 6820" -> "3872 6847" [label="[1]", style=dashed]; -"3872 6847" -> "3882 6848" [label="[1]", style=dashed]; +"3543 6826" -> "3544 6827" [label="[-1]", style=dashed]; +"3544 6827" -> "3545 6828" [label="[-1]", style=solid]; +"3545 6828" -> "3546 6829" [label="[1, -1]", style=dashed]; +"3546 6829" -> "3547 6830" [label="[-1, 1]", style=dashed]; +"3547 6830" -> "3548 6846" [label="[-1]", style=dashed]; +"3548 6846" -> "3872 
6847" [label="[-1]", style=dashed]; +"3549 6513" -> "3550 6515" [label="[]", style=solid]; +"3550 6515" -> "3551 6516" [label="[]", style=solid]; +"3551 6516" -> "3552 6517" [label="[-1]", style=dashed]; +"3552 6517" -> "3869 6519" [label="[]", style=dashed]; +"3553 6469" -> "3554 6471" [label="[]", style=solid]; +"3554 6471" -> "3555 6472" [label="[]", style=solid]; +"3555 6472" -> "3556 6473" [label="[-1]", style=dashed]; +"3556 6473" -> "3869 6519" [label="[]", style=dashed]; +"3557 6425" -> "3558 6427" [label="[]", style=solid]; +"3558 6427" -> "3559 6428" [label="[]", style=solid]; +"3559 6428" -> "3560 6429" [label="[-1]", style=dashed]; +"3560 6429" -> "3869 6519" [label="[]", style=dashed]; +"3561 6381" -> "3562 6383" [label="[]", style=solid]; +"3562 6383" -> "3563 6384" [label="[]", style=solid]; +"3563 6384" -> "3564 6385" [label="[-1]", style=dashed]; +"3564 6385" -> "3869 6519" [label="[]", style=dashed]; +"3565 6337" -> "3566 6339" [label="[]", style=solid]; +"3566 6339" -> "3567 6340" [label="[]", style=solid]; +"3567 6340" -> "3568 6341" [label="[-1]", style=dashed]; +"3568 6341" -> "3869 6519" [label="[]", style=dashed]; +"3569 6293" -> "3570 6295" [label="[]", style=solid]; +"3570 6295" -> "3571 6296" [label="[]", style=solid]; +"3571 6296" -> "3572 6297" [label="[-1]", style=dashed]; +"3572 6297" -> "3869 6519" [label="[]", style=dashed]; +"3573 6249" -> "3574 6251" [label="[]", style=solid]; +"3574 6251" -> "3575 6252" [label="[]", style=solid]; +"3575 6252" -> "3576 6253" [label="[-1]", style=dashed]; +"3576 6253" -> "3869 6519" [label="[]", style=dashed]; +"3577 6205" -> "3578 6207" [label="[]", style=solid]; +"3578 6207" -> "3579 6208" [label="[]", style=solid]; +"3579 6208" -> "3580 6209" [label="[-1]", style=dashed]; +"3580 6209" -> "3869 6519" [label="[]", style=dashed]; +"3581 6161" -> "3582 6163" [label="[]", style=solid]; +"3582 6163" -> "3583 6164" [label="[]", style=solid]; +"3583 6164" -> "3584 6165" [label="[-1]", style=dashed]; +"3584 6165" -> "3869 6519" [label="[]", style=dashed]; +"3585 6117" -> "3586 6119" [label="[]", style=solid]; +"3586 6119" -> "3587 6120" [label="[]", style=solid]; +"3587 6120" -> "3588 6121" [label="[-1]", style=dashed]; +"3588 6121" -> "3869 6519" [label="[]", style=dashed]; +"3589 6073" -> "3590 6075" [label="[]", style=solid]; +"3590 6075" -> "3591 6076" [label="[]", style=solid]; +"3591 6076" -> "3592 6077" [label="[-1]", style=dashed]; +"3592 6077" -> "3869 6519" [label="[]", style=dashed]; +"3593 6029" -> "3594 6031" [label="[]", style=solid]; +"3594 6031" -> "3595 6032" [label="[]", style=solid]; +"3595 6032" -> "3596 6033" [label="[-1]", style=dashed]; +"3596 6033" -> "3869 6519" [label="[]", style=dashed]; +"3597 5985" -> "3598 5987" [label="[]", style=solid]; +"3598 5987" -> "3599 5988" [label="[]", style=solid]; +"3599 5988" -> "3600 5989" [label="[-1]", style=dashed]; +"3600 5989" -> "3869 6519" [label="[]", style=dashed]; +"3601 5941" -> "3602 5943" [label="[]", style=solid]; +"3602 5943" -> "3603 5944" [label="[]", style=solid]; +"3603 5944" -> "3604 5945" [label="[-1]", style=dashed]; +"3604 5945" -> "3869 6519" [label="[]", style=dashed]; +"3605 5897" -> "3606 5899" [label="[]", style=solid]; +"3606 5899" -> "3607 5900" [label="[]", style=solid]; +"3607 5900" -> "3608 5901" [label="[-1]", style=dashed]; +"3608 5901" -> "3869 6519" [label="[]", style=dashed]; +"3609 5853" -> "3610 5855" [label="[]", style=solid]; +"3610 5855" -> "3611 5856" [label="[]", style=solid]; +"3611 5856" -> "3612 5857" [label="[-1]", 
style=dashed]; +"3612 5857" -> "3869 6519" [label="[]", style=dashed]; +"3613 5809" -> "3614 5811" [label="[]", style=solid]; +"3614 5811" -> "3615 5812" [label="[]", style=solid]; +"3615 5812" -> "3616 5813" [label="[-1]", style=dashed]; +"3616 5813" -> "3869 6519" [label="[]", style=dashed]; +"3617 5765" -> "3618 5767" [label="[]", style=solid]; +"3618 5767" -> "3619 5768" [label="[]", style=solid]; +"3619 5768" -> "3620 5769" [label="[-1]", style=dashed]; +"3620 5769" -> "3869 6519" [label="[]", style=dashed]; +"3621 5721" -> "3622 5723" [label="[]", style=solid]; +"3622 5723" -> "3623 5724" [label="[]", style=solid]; +"3623 5724" -> "3624 5725" [label="[-1]", style=dashed]; +"3624 5725" -> "3869 6519" [label="[]", style=dashed]; +"3625 5677" -> "3626 5679" [label="[]", style=solid]; +"3626 5679" -> "3627 5680" [label="[]", style=solid]; +"3627 5680" -> "3628 5681" [label="[-1]", style=dashed]; +"3628 5681" -> "3869 6519" [label="[]", style=dashed]; +"3629 5633" -> "3630 5635" [label="[]", style=solid]; +"3630 5635" -> "3631 5636" [label="[]", style=solid]; +"3631 5636" -> "3632 5637" [label="[-1]", style=dashed]; +"3632 5637" -> "3869 6519" [label="[]", style=dashed]; +"3633 5589" -> "3634 5591" [label="[]", style=solid]; +"3634 5591" -> "3635 5592" [label="[]", style=solid]; +"3635 5592" -> "3636 5593" [label="[-1]", style=dashed]; +"3636 5593" -> "3869 6519" [label="[]", style=dashed]; +"3637 5545" -> "3638 5547" [label="[]", style=solid]; +"3638 5547" -> "3639 5548" [label="[]", style=solid]; +"3639 5548" -> "3640 5549" [label="[-1]", style=dashed]; +"3640 5549" -> "3869 6519" [label="[]", style=dashed]; +"3641 5501" -> "3642 5503" [label="[]", style=solid]; +"3642 5503" -> "3643 5504" [label="[]", style=solid]; +"3643 5504" -> "3644 5505" [label="[-1]", style=dashed]; +"3644 5505" -> "3869 6519" [label="[]", style=dashed]; +"3645 5457" -> "3646 5459" [label="[]", style=solid]; +"3646 5459" -> "3647 5460" [label="[]", style=solid]; +"3647 5460" -> "3648 5461" [label="[-1]", style=dashed]; +"3648 5461" -> "3869 6519" [label="[]", style=dashed]; +"3649 5413" -> "3650 5415" [label="[]", style=solid]; +"3650 5415" -> "3651 5416" [label="[]", style=solid]; +"3651 5416" -> "3652 5417" [label="[-1]", style=dashed]; +"3652 5417" -> "3869 6519" [label="[]", style=dashed]; +"3653 5369" -> "3654 5371" [label="[]", style=solid]; +"3654 5371" -> "3655 5372" [label="[]", style=solid]; +"3655 5372" -> "3656 5373" [label="[-1]", style=dashed]; +"3656 5373" -> "3869 6519" [label="[]", style=dashed]; +"3657 5325" -> "3658 5327" [label="[]", style=solid]; +"3658 5327" -> "3659 5328" [label="[]", style=solid]; +"3659 5328" -> "3660 5329" [label="[-1]", style=dashed]; +"3660 5329" -> "3869 6519" [label="[]", style=dashed]; +"3661 5281" -> "3662 5283" [label="[]", style=solid]; +"3662 5283" -> "3663 5284" [label="[]", style=solid]; +"3663 5284" -> "3664 5285" [label="[-1]", style=dashed]; +"3664 5285" -> "3869 6519" [label="[]", style=dashed]; +"3665 5237" -> "3666 5239" [label="[]", style=solid]; +"3666 5239" -> "3667 5240" [label="[]", style=solid]; +"3667 5240" -> "3668 5241" [label="[-1]", style=dashed]; +"3668 5241" -> "3869 6519" [label="[]", style=dashed]; +"3669 5193" -> "3670 5195" [label="[]", style=solid]; +"3670 5195" -> "3671 5196" [label="[]", style=solid]; +"3671 5196" -> "3672 5197" [label="[-1]", style=dashed]; +"3672 5197" -> "3869 6519" [label="[]", style=dashed]; +"3673 5149" -> "3674 5151" [label="[]", style=solid]; +"3674 5151" -> "3675 5152" [label="[]", style=solid]; +"3675 5152" 
-> "3676 5153" [label="[-1]", style=dashed]; +"3676 5153" -> "3869 6519" [label="[]", style=dashed]; +"3677 5105" -> "3678 5107" [label="[]", style=solid]; +"3678 5107" -> "3679 5108" [label="[]", style=solid]; +"3679 5108" -> "3680 5109" [label="[-1]", style=dashed]; +"3680 5109" -> "3869 6519" [label="[]", style=dashed]; +"3681 5061" -> "3682 5063" [label="[]", style=solid]; +"3682 5063" -> "3683 5064" [label="[]", style=solid]; +"3683 5064" -> "3684 5065" [label="[-1]", style=dashed]; +"3684 5065" -> "3869 6519" [label="[]", style=dashed]; +"3685 5017" -> "3686 5019" [label="[]", style=solid]; +"3686 5019" -> "3687 5020" [label="[]", style=solid]; +"3687 5020" -> "3688 5021" [label="[-1]", style=dashed]; +"3688 5021" -> "3869 6519" [label="[]", style=dashed]; +"3689 4973" -> "3690 4975" [label="[]", style=solid]; +"3690 4975" -> "3691 4976" [label="[]", style=solid]; +"3691 4976" -> "3692 4977" [label="[-1]", style=dashed]; +"3692 4977" -> "3869 6519" [label="[]", style=dashed]; +"3693 4929" -> "3694 4931" [label="[]", style=solid]; +"3694 4931" -> "3695 4932" [label="[]", style=solid]; +"3695 4932" -> "3696 4933" [label="[-1]", style=dashed]; +"3696 4933" -> "3869 6519" [label="[]", style=dashed]; +"3697 4885" -> "3698 4887" [label="[]", style=solid]; +"3698 4887" -> "3699 4888" [label="[]", style=solid]; +"3699 4888" -> "3700 4889" [label="[-1]", style=dashed]; +"3700 4889" -> "3869 6519" [label="[]", style=dashed]; +"3701 4841" -> "3702 4843" [label="[]", style=solid]; +"3702 4843" -> "3703 4844" [label="[]", style=solid]; +"3703 4844" -> "3704 4845" [label="[-1]", style=dashed]; +"3704 4845" -> "3869 6519" [label="[]", style=dashed]; +"3705 4797" -> "3706 4799" [label="[]", style=solid]; +"3706 4799" -> "3707 4800" [label="[]", style=solid]; +"3707 4800" -> "3708 4801" [label="[-1]", style=dashed]; +"3708 4801" -> "3869 6519" [label="[]", style=dashed]; +"3709 4753" -> "3710 4755" [label="[]", style=solid]; +"3710 4755" -> "3711 4756" [label="[]", style=solid]; +"3711 4756" -> "3712 4757" [label="[-1]", style=dashed]; +"3712 4757" -> "3869 6519" [label="[]", style=dashed]; +"3713 4709" -> "3714 4711" [label="[]", style=solid]; +"3714 4711" -> "3715 4712" [label="[]", style=solid]; +"3715 4712" -> "3716 4713" [label="[-1]", style=dashed]; +"3716 4713" -> "3869 6519" [label="[]", style=dashed]; +"3717 4665" -> "3718 4667" [label="[]", style=solid]; +"3718 4667" -> "3719 4668" [label="[]", style=solid]; +"3719 4668" -> "3720 4669" [label="[-1]", style=dashed]; +"3720 4669" -> "3869 6519" [label="[]", style=dashed]; +"3721 4621" -> "3722 4623" [label="[]", style=solid]; +"3722 4623" -> "3723 4624" [label="[]", style=solid]; +"3723 4624" -> "3724 4625" [label="[-1]", style=dashed]; +"3724 4625" -> "3869 6519" [label="[]", style=dashed]; +"3725 4577" -> "3726 4579" [label="[]", style=solid]; +"3726 4579" -> "3727 4580" [label="[]", style=solid]; +"3727 4580" -> "3728 4581" [label="[-1]", style=dashed]; +"3728 4581" -> "3869 6519" [label="[]", style=dashed]; +"3729 4533" -> "3730 4535" [label="[]", style=solid]; +"3730 4535" -> "3731 4536" [label="[]", style=solid]; +"3731 4536" -> "3732 4537" [label="[-1]", style=dashed]; +"3732 4537" -> "3869 6519" [label="[]", style=dashed]; +"3733 4489" -> "3734 4491" [label="[]", style=solid]; +"3734 4491" -> "3735 4492" [label="[]", style=solid]; +"3735 4492" -> "3736 4493" [label="[-1]", style=dashed]; +"3736 4493" -> "3869 6519" [label="[]", style=dashed]; +"3737 4445" -> "3738 4447" [label="[]", style=solid]; +"3738 4447" -> "3739 4448" 
[label="[]", style=solid]; +"3739 4448" -> "3740 4449" [label="[-1]", style=dashed]; +"3740 4449" -> "3869 6519" [label="[]", style=dashed]; +"3741 4401" -> "3742 4403" [label="[]", style=solid]; +"3742 4403" -> "3743 4404" [label="[]", style=solid]; +"3743 4404" -> "3744 4405" [label="[-1]", style=dashed]; +"3744 4405" -> "3869 6519" [label="[]", style=dashed]; +"3745 4357" -> "3746 4359" [label="[]", style=solid]; +"3746 4359" -> "3747 4360" [label="[]", style=solid]; +"3747 4360" -> "3748 4361" [label="[-1]", style=dashed]; +"3748 4361" -> "3869 6519" [label="[]", style=dashed]; +"3749 4313" -> "3750 4315" [label="[]", style=solid]; +"3750 4315" -> "3751 4316" [label="[]", style=solid]; +"3751 4316" -> "3752 4317" [label="[-1]", style=dashed]; +"3752 4317" -> "3869 6519" [label="[]", style=dashed]; +"3753 4269" -> "3754 4271" [label="[]", style=solid]; +"3754 4271" -> "3755 4272" [label="[]", style=solid]; +"3755 4272" -> "3756 4273" [label="[-1]", style=dashed]; +"3756 4273" -> "3869 6519" [label="[]", style=dashed]; +"3757 4225" -> "3758 4227" [label="[]", style=solid]; +"3758 4227" -> "3759 4228" [label="[]", style=solid]; +"3759 4228" -> "3760 4229" [label="[-1]", style=dashed]; +"3760 4229" -> "3869 6519" [label="[]", style=dashed]; +"3761 4181" -> "3762 4183" [label="[]", style=solid]; +"3762 4183" -> "3763 4184" [label="[]", style=solid]; +"3763 4184" -> "3764 4185" [label="[-1]", style=dashed]; +"3764 4185" -> "3869 6519" [label="[]", style=dashed]; +"3765 4137" -> "3766 4139" [label="[]", style=solid]; +"3766 4139" -> "3767 4140" [label="[]", style=solid]; +"3767 4140" -> "3768 4141" [label="[-1]", style=dashed]; +"3768 4141" -> "3869 6519" [label="[]", style=dashed]; +"3769 4093" -> "3770 4095" [label="[]", style=solid]; +"3770 4095" -> "3771 4096" [label="[]", style=solid]; +"3771 4096" -> "3772 4097" [label="[-1]", style=dashed]; +"3772 4097" -> "3869 6519" [label="[]", style=dashed]; +"3773 4049" -> "3774 4051" [label="[]", style=solid]; +"3774 4051" -> "3775 4052" [label="[]", style=solid]; +"3775 4052" -> "3776 4053" [label="[-1]", style=dashed]; +"3776 4053" -> "3869 6519" [label="[]", style=dashed]; +"3777 4005" -> "3778 4007" [label="[]", style=solid]; +"3778 4007" -> "3779 4008" [label="[]", style=solid]; +"3779 4008" -> "3780 4009" [label="[-1]", style=dashed]; +"3780 4009" -> "3869 6519" [label="[]", style=dashed]; +"3781 3961" -> "3782 3963" [label="[]", style=solid]; +"3782 3963" -> "3783 3964" [label="[]", style=solid]; +"3783 3964" -> "3784 3965" [label="[-1]", style=dashed]; +"3784 3965" -> "3869 6519" [label="[]", style=dashed]; +"3785 3917" -> "3786 3919" [label="[]", style=solid]; +"3786 3919" -> "3787 3920" [label="[]", style=solid]; +"3787 3920" -> "3788 3921" [label="[-1]", style=dashed]; +"3788 3921" -> "3869 6519" [label="[]", style=dashed]; +"3789 3873" -> "3790 3875" [label="[]", style=solid]; +"3790 3875" -> "3791 3876" [label="[]", style=solid]; +"3791 3876" -> "3792 3877" [label="[-1]", style=dashed]; +"3792 3877" -> "3869 6519" [label="[]", style=dashed]; +"3793 3829" -> "3794 3831" [label="[]", style=solid]; +"3794 3831" -> "3795 3832" [label="[]", style=solid]; +"3795 3832" -> "3796 3833" [label="[-1]", style=dashed]; +"3796 3833" -> "3869 6519" [label="[]", style=dashed]; +"3797 3785" -> "3798 3787" [label="[]", style=solid]; +"3798 3787" -> "3799 3788" [label="[]", style=solid]; +"3799 3788" -> "3800 3789" [label="[-1]", style=dashed]; +"3800 3789" -> "3869 6519" [label="[]", style=dashed]; +"3801 3741" -> "3802 3743" [label="[]", 
style=solid]; +"3802 3743" -> "3803 3744" [label="[]", style=solid]; +"3803 3744" -> "3804 3745" [label="[-1]", style=dashed]; +"3804 3745" -> "3869 6519" [label="[]", style=dashed]; +"3805 3697" -> "3806 3699" [label="[]", style=solid]; +"3806 3699" -> "3807 3700" [label="[]", style=solid]; +"3807 3700" -> "3808 3701" [label="[-1]", style=dashed]; +"3808 3701" -> "3869 6519" [label="[]", style=dashed]; +"3809 3653" -> "3810 3655" [label="[]", style=solid]; +"3810 3655" -> "3811 3656" [label="[]", style=solid]; +"3811 3656" -> "3812 3657" [label="[-1]", style=dashed]; +"3812 3657" -> "3869 6519" [label="[]", style=dashed]; +"3813 3609" -> "3814 3611" [label="[]", style=solid]; +"3814 3611" -> "3815 3612" [label="[]", style=solid]; +"3815 3612" -> "3816 3613" [label="[-1]", style=dashed]; +"3816 3613" -> "3869 6519" [label="[]", style=dashed]; +"3817 3565" -> "3818 3567" [label="[]", style=solid]; +"3818 3567" -> "3819 3568" [label="[]", style=solid]; +"3819 3568" -> "3820 3569" [label="[-1]", style=dashed]; +"3820 3569" -> "3869 6519" [label="[]", style=dashed]; +"3821 3521" -> "3822 3523" [label="[]", style=solid]; +"3822 3523" -> "3823 3524" [label="[]", style=solid]; +"3823 3524" -> "3824 3525" [label="[-1]", style=dashed]; +"3824 3525" -> "3869 6519" [label="[]", style=dashed]; +"3825 3477" -> "3826 3479" [label="[]", style=solid]; +"3826 3479" -> "3827 3480" [label="[]", style=solid]; +"3827 3480" -> "3828 3481" [label="[-1]", style=dashed]; +"3828 3481" -> "3869 6519" [label="[]", style=dashed]; +"3829 3433" -> "3830 3435" [label="[]", style=solid]; +"3830 3435" -> "3831 3436" [label="[]", style=solid]; +"3831 3436" -> "3832 3437" [label="[-1]", style=dashed]; +"3832 3437" -> "3869 6519" [label="[]", style=dashed]; +"3833 3389" -> "3834 3391" [label="[]", style=solid]; +"3834 3391" -> "3835 3392" [label="[]", style=solid]; +"3835 3392" -> "3836 3393" [label="[-1]", style=dashed]; +"3836 3393" -> "3869 6519" [label="[]", style=dashed]; +"3837 3345" -> "3838 3347" [label="[]", style=solid]; +"3838 3347" -> "3839 3348" [label="[]", style=solid]; +"3839 3348" -> "3840 3349" [label="[-1]", style=dashed]; +"3840 3349" -> "3869 6519" [label="[]", style=dashed]; +"3841 3301" -> "3842 3303" [label="[]", style=solid]; +"3842 3303" -> "3843 3304" [label="[]", style=solid]; +"3843 3304" -> "3844 3305" [label="[-1]", style=dashed]; +"3844 3305" -> "3869 6519" [label="[]", style=dashed]; +"3845 3257" -> "3846 3259" [label="[]", style=solid]; +"3846 3259" -> "3847 3260" [label="[]", style=solid]; +"3847 3260" -> "3848 3261" [label="[-1]", style=dashed]; +"3848 3261" -> "3869 6519" [label="[]", style=dashed]; +"3849 3213" -> "3850 3215" [label="[]", style=solid]; +"3850 3215" -> "3851 3216" [label="[]", style=solid]; +"3851 3216" -> "3852 3217" [label="[-1]", style=dashed]; +"3852 3217" -> "3869 6519" [label="[]", style=dashed]; +"3853 3169" -> "3854 3171" [label="[]", style=solid]; +"3854 3171" -> "3855 3172" [label="[]", style=solid]; +"3855 3172" -> "3856 3173" [label="[-1]", style=dashed]; +"3856 3173" -> "3869 6519" [label="[]", style=dashed]; +"3857 3125" -> "3858 3127" [label="[]", style=solid]; +"3858 3127" -> "3859 3128" [label="[]", style=solid]; +"3859 3128" -> "3860 3129" [label="[-1]", style=dashed]; +"3860 3129" -> "3869 6519" [label="[]", style=dashed]; +"3861 3081" -> "3862 3083" [label="[]", style=solid]; +"3862 3083" -> "3863 3084" [label="[]", style=solid]; +"3863 3084" -> "3864 3085" [label="[-1]", style=dashed]; +"3864 3085" -> "3869 6519" [label="[]", style=dashed]; +"3865 
3037" -> "3866 3039" [label="[]", style=solid]; +"3866 3039" -> "3867 3040" [label="[]", style=solid]; +"3867 3040" -> "3868 3041" [label="[-1]", style=dashed]; +"3868 3041" -> "3869 6519" [label="[]", style=dashed]; +"3869 6519" -> "3870 6532" [label="[]", style=dashed]; +"3870 6532" -> "3871 6820" [label="[-1]", style=dashed]; +"3870 6532" -> "3888 nncf_model_output_1" [label="[-1]", style=dashed]; +"3871 6820" -> "3872 6847" [label="[-1]", style=dashed]; +"3872 6847" -> "3882 6848" [label="[-1]", style=dashed]; "3873 6835" -> "3874 6836" [label="[4]", style=dashed]; "3874 6836" -> "3875 6840" [label="[]", style=dashed]; "3875 6840" -> "3880 6841" [label="[1]", style=dashed]; @@ -8816,10 +8816,10 @@ strict digraph { "3878 6839" -> "3880 6841" [label="[1]", style=dashed]; "3879 6838" -> "3880 6841" [label="[1]", style=dashed]; "3880 6841" -> "3881 6842" [label="[3]", style=dashed]; -"3881 6842" -> "3882 6848" [label="[1]", style=solid]; -"3882 6848" -> "3883 6849" [label="[1]", style=solid]; -"3883 6849" -> "3890 nncf_model_output_3" [label="[1, 1, 28, 28]", style=solid]; -"3884 6533" -> "3885 6534" [label="[1]", style=dashed]; -"3885 6534" -> "3889 nncf_model_output_2" [label="[1]", style=solid]; -"3886 nncf_model_input_0" -> "2 0" [label="[3, 1, 1]", style=solid]; +"3881 6842" -> "3882 6848" [label="[]", style=solid]; +"3882 6848" -> "3883 6849" [label="[]", style=solid]; +"3883 6849" -> "3890 nncf_model_output_3" [label="[-1, 1, 28, 28]", style=solid]; +"3884 6533" -> "3885 6534" [label="[]", style=dashed]; +"3885 6534" -> "3889 nncf_model_output_2" [label="[-1]", style=solid]; +"3886 nncf_model_input_0" -> "2 0" [label="[3, -1, -1]", style=solid]; } diff --git a/tests/onnx/data/reference_graphs/original_nncf_graph/fcn-resnet50-12.dot b/tests/onnx/data/reference_graphs/original_nncf_graph/fcn-resnet50-12.dot index 95a506ba359..83c462a9b2c 100644 --- a/tests/onnx/data/reference_graphs/original_nncf_graph/fcn-resnet50-12.dot +++ b/tests/onnx/data/reference_graphs/original_nncf_graph/fcn-resnet50-12.dot @@ -171,146 +171,146 @@ strict digraph { "4 Constant_4" -> "5 Gather_5" [label="[]", style=dashed]; "5 Gather_5" -> "131 Unsqueeze_129" [label="[]", style=dashed]; "5 Gather_5" -> "149 Unsqueeze_145" [label="[]", style=dashed]; -"6 Conv_6" -> "7 Relu_7" [label="[1, 64, 1, 1]", style=solid]; -"7 Relu_7" -> "8 MaxPool_8" [label="[1, 64, 1, 1]", style=solid]; -"8 MaxPool_8" -> "9 Conv_9" [label="[1, 64, 1, 1]", style=solid]; -"8 MaxPool_8" -> "14 Conv_14" [label="[1, 64, 1, 1]", style=solid]; -"9 Conv_9" -> "10 Relu_10" [label="[1, 64, 1, 1]", style=solid]; -"10 Relu_10" -> "11 Conv_11" [label="[1, 64, 1, 1]", style=solid]; -"11 Conv_11" -> "12 Relu_12" [label="[1, 64, 1, 1]", style=solid]; -"12 Relu_12" -> "13 Conv_13" [label="[1, 64, 1, 1]", style=solid]; -"13 Conv_13" -> "15 Add_15" [label="[1, 256, 1, 1]", style=solid]; -"14 Conv_14" -> "15 Add_15" [label="[1, 256, 1, 1]", style=solid]; -"15 Add_15" -> "16 Relu_16" [label="[1, 256, 1, 1]", style=solid]; -"16 Relu_16" -> "17 Conv_17" [label="[1, 256, 1, 1]", style=solid]; -"16 Relu_16" -> "22 Add_22" [label="[1, 256, 1, 1]", style=solid]; -"17 Conv_17" -> "18 Relu_18" [label="[1, 64, 1, 1]", style=solid]; -"18 Relu_18" -> "19 Conv_19" [label="[1, 64, 1, 1]", style=solid]; -"19 Conv_19" -> "20 Relu_20" [label="[1, 64, 1, 1]", style=solid]; -"20 Relu_20" -> "21 Conv_21" [label="[1, 64, 1, 1]", style=solid]; -"21 Conv_21" -> "22 Add_22" [label="[1, 256, 1, 1]", style=solid]; -"22 Add_22" -> "23 Relu_23" [label="[1, 256, 1, 1]", style=solid]; 
-"23 Relu_23" -> "24 Conv_24" [label="[1, 256, 1, 1]", style=solid]; -"23 Relu_23" -> "29 Add_29" [label="[1, 256, 1, 1]", style=solid]; -"24 Conv_24" -> "25 Relu_25" [label="[1, 64, 1, 1]", style=solid]; -"25 Relu_25" -> "26 Conv_26" [label="[1, 64, 1, 1]", style=solid]; -"26 Conv_26" -> "27 Relu_27" [label="[1, 64, 1, 1]", style=solid]; -"27 Relu_27" -> "28 Conv_28" [label="[1, 64, 1, 1]", style=solid]; -"28 Conv_28" -> "29 Add_29" [label="[1, 256, 1, 1]", style=solid]; -"29 Add_29" -> "30 Relu_30" [label="[1, 256, 1, 1]", style=solid]; -"30 Relu_30" -> "31 Conv_31" [label="[1, 256, 1, 1]", style=solid]; -"30 Relu_30" -> "36 Conv_36" [label="[1, 256, 1, 1]", style=solid]; -"31 Conv_31" -> "32 Relu_32" [label="[1, 128, 1, 1]", style=solid]; -"32 Relu_32" -> "33 Conv_33" [label="[1, 128, 1, 1]", style=solid]; -"33 Conv_33" -> "34 Relu_34" [label="[1, 128, 1, 1]", style=solid]; -"34 Relu_34" -> "35 Conv_35" [label="[1, 128, 1, 1]", style=solid]; -"35 Conv_35" -> "37 Add_37" [label="[1, 512, 1, 1]", style=solid]; -"36 Conv_36" -> "37 Add_37" [label="[1, 512, 1, 1]", style=solid]; -"37 Add_37" -> "38 Relu_38" [label="[1, 512, 1, 1]", style=solid]; -"38 Relu_38" -> "39 Conv_39" [label="[1, 512, 1, 1]", style=solid]; -"38 Relu_38" -> "44 Add_44" [label="[1, 512, 1, 1]", style=solid]; -"39 Conv_39" -> "40 Relu_40" [label="[1, 128, 1, 1]", style=solid]; -"40 Relu_40" -> "41 Conv_41" [label="[1, 128, 1, 1]", style=solid]; -"41 Conv_41" -> "42 Relu_42" [label="[1, 128, 1, 1]", style=solid]; -"42 Relu_42" -> "43 Conv_43" [label="[1, 128, 1, 1]", style=solid]; -"43 Conv_43" -> "44 Add_44" [label="[1, 512, 1, 1]", style=solid]; -"44 Add_44" -> "45 Relu_45" [label="[1, 512, 1, 1]", style=solid]; -"45 Relu_45" -> "46 Conv_46" [label="[1, 512, 1, 1]", style=solid]; -"45 Relu_45" -> "51 Add_51" [label="[1, 512, 1, 1]", style=solid]; -"46 Conv_46" -> "47 Relu_47" [label="[1, 128, 1, 1]", style=solid]; -"47 Relu_47" -> "48 Conv_48" [label="[1, 128, 1, 1]", style=solid]; -"48 Conv_48" -> "49 Relu_49" [label="[1, 128, 1, 1]", style=solid]; -"49 Relu_49" -> "50 Conv_50" [label="[1, 128, 1, 1]", style=solid]; -"50 Conv_50" -> "51 Add_51" [label="[1, 512, 1, 1]", style=solid]; -"51 Add_51" -> "52 Relu_52" [label="[1, 512, 1, 1]", style=solid]; -"52 Relu_52" -> "53 Conv_53" [label="[1, 512, 1, 1]", style=solid]; -"52 Relu_52" -> "58 Add_58" [label="[1, 512, 1, 1]", style=solid]; -"53 Conv_53" -> "54 Relu_54" [label="[1, 128, 1, 1]", style=solid]; -"54 Relu_54" -> "55 Conv_55" [label="[1, 128, 1, 1]", style=solid]; -"55 Conv_55" -> "56 Relu_56" [label="[1, 128, 1, 1]", style=solid]; -"56 Relu_56" -> "57 Conv_57" [label="[1, 128, 1, 1]", style=solid]; -"57 Conv_57" -> "58 Add_58" [label="[1, 512, 1, 1]", style=solid]; -"58 Add_58" -> "59 Relu_59" [label="[1, 512, 1, 1]", style=solid]; -"59 Relu_59" -> "60 Conv_60" [label="[1, 512, 1, 1]", style=solid]; -"59 Relu_59" -> "65 Conv_65" [label="[1, 512, 1, 1]", style=solid]; -"60 Conv_60" -> "61 Relu_61" [label="[1, 256, 1, 1]", style=solid]; -"61 Relu_61" -> "62 Conv_62" [label="[1, 256, 1, 1]", style=solid]; -"62 Conv_62" -> "63 Relu_63" [label="[1, 256, 1, 1]", style=solid]; -"63 Relu_63" -> "64 Conv_64" [label="[1, 256, 1, 1]", style=solid]; -"64 Conv_64" -> "66 Add_66" [label="[1, 1024, 1, 1]", style=solid]; -"65 Conv_65" -> "66 Add_66" [label="[1, 1024, 1, 1]", style=solid]; -"66 Add_66" -> "67 Relu_67" [label="[1, 1024, 1, 1]", style=solid]; -"67 Relu_67" -> "68 Conv_68" [label="[1, 1024, 1, 1]", style=solid]; -"67 Relu_67" -> "73 Add_73" [label="[1, 1024, 1, 
1]", style=solid]; -"68 Conv_68" -> "69 Relu_69" [label="[1, 256, 1, 1]", style=solid]; -"69 Relu_69" -> "70 Conv_70" [label="[1, 256, 1, 1]", style=solid]; -"70 Conv_70" -> "71 Relu_71" [label="[1, 256, 1, 1]", style=solid]; -"71 Relu_71" -> "72 Conv_72" [label="[1, 256, 1, 1]", style=solid]; -"72 Conv_72" -> "73 Add_73" [label="[1, 1024, 1, 1]", style=solid]; -"73 Add_73" -> "74 Relu_74" [label="[1, 1024, 1, 1]", style=solid]; -"74 Relu_74" -> "75 Conv_75" [label="[1, 1024, 1, 1]", style=solid]; -"74 Relu_74" -> "80 Add_80" [label="[1, 1024, 1, 1]", style=solid]; -"75 Conv_75" -> "76 Relu_76" [label="[1, 256, 1, 1]", style=solid]; -"76 Relu_76" -> "77 Conv_77" [label="[1, 256, 1, 1]", style=solid]; -"77 Conv_77" -> "78 Relu_78" [label="[1, 256, 1, 1]", style=solid]; -"78 Relu_78" -> "79 Conv_79" [label="[1, 256, 1, 1]", style=solid]; -"79 Conv_79" -> "80 Add_80" [label="[1, 1024, 1, 1]", style=solid]; -"80 Add_80" -> "81 Relu_81" [label="[1, 1024, 1, 1]", style=solid]; -"81 Relu_81" -> "82 Conv_82" [label="[1, 1024, 1, 1]", style=solid]; -"81 Relu_81" -> "87 Add_87" [label="[1, 1024, 1, 1]", style=solid]; -"82 Conv_82" -> "83 Relu_83" [label="[1, 256, 1, 1]", style=solid]; -"83 Relu_83" -> "84 Conv_84" [label="[1, 256, 1, 1]", style=solid]; -"84 Conv_84" -> "85 Relu_85" [label="[1, 256, 1, 1]", style=solid]; -"85 Relu_85" -> "86 Conv_86" [label="[1, 256, 1, 1]", style=solid]; -"86 Conv_86" -> "87 Add_87" [label="[1, 1024, 1, 1]", style=solid]; -"87 Add_87" -> "88 Relu_88" [label="[1, 1024, 1, 1]", style=solid]; -"88 Relu_88" -> "89 Conv_89" [label="[1, 1024, 1, 1]", style=solid]; -"88 Relu_88" -> "94 Add_94" [label="[1, 1024, 1, 1]", style=solid]; -"89 Conv_89" -> "90 Relu_90" [label="[1, 256, 1, 1]", style=solid]; -"90 Relu_90" -> "91 Conv_91" [label="[1, 256, 1, 1]", style=solid]; -"91 Conv_91" -> "92 Relu_92" [label="[1, 256, 1, 1]", style=solid]; -"92 Relu_92" -> "93 Conv_93" [label="[1, 256, 1, 1]", style=solid]; -"93 Conv_93" -> "94 Add_94" [label="[1, 1024, 1, 1]", style=solid]; -"94 Add_94" -> "95 Relu_95" [label="[1, 1024, 1, 1]", style=solid]; -"95 Relu_95" -> "96 Conv_96" [label="[1, 1024, 1, 1]", style=solid]; -"95 Relu_95" -> "101 Add_101" [label="[1, 1024, 1, 1]", style=solid]; -"96 Conv_96" -> "97 Relu_97" [label="[1, 256, 1, 1]", style=solid]; -"97 Relu_97" -> "98 Conv_98" [label="[1, 256, 1, 1]", style=solid]; -"98 Conv_98" -> "99 Relu_99" [label="[1, 256, 1, 1]", style=solid]; -"99 Relu_99" -> "100 Conv_100" [label="[1, 256, 1, 1]", style=solid]; -"100 Conv_100" -> "101 Add_101" [label="[1, 1024, 1, 1]", style=solid]; -"101 Add_101" -> "102 Relu_102" [label="[1, 1024, 1, 1]", style=solid]; -"102 Relu_102" -> "103 Conv_103" [label="[1, 1024, 1, 1]", style=solid]; -"102 Relu_102" -> "108 Conv_108" [label="[1, 1024, 1, 1]", style=solid]; -"102 Relu_102" -> "143 Conv_141" [label="[1, 1024, 1, 1]", style=solid]; -"103 Conv_103" -> "104 Relu_104" [label="[1, 512, 1, 1]", style=solid]; -"104 Relu_104" -> "105 Conv_105" [label="[1, 512, 1, 1]", style=solid]; -"105 Conv_105" -> "106 Relu_106" [label="[1, 512, 1, 1]", style=solid]; -"106 Relu_106" -> "107 Conv_107" [label="[1, 512, 1, 1]", style=solid]; -"107 Conv_107" -> "109 Add_109" [label="[1, 2048, 1, 1]", style=solid]; -"108 Conv_108" -> "109 Add_109" [label="[1, 2048, 1, 1]", style=solid]; -"109 Add_109" -> "110 Relu_110" [label="[1, 2048, 1, 1]", style=solid]; -"110 Relu_110" -> "111 Conv_111" [label="[1, 2048, 1, 1]", style=solid]; -"110 Relu_110" -> "116 Add_116" [label="[1, 2048, 1, 1]", style=solid]; -"111 Conv_111" 
-> "112 Relu_112" [label="[1, 512, 1, 1]", style=solid]; -"112 Relu_112" -> "113 Conv_113" [label="[1, 512, 1, 1]", style=solid]; -"113 Conv_113" -> "114 Relu_114" [label="[1, 512, 1, 1]", style=solid]; -"114 Relu_114" -> "115 Conv_115" [label="[1, 512, 1, 1]", style=solid]; -"115 Conv_115" -> "116 Add_116" [label="[1, 2048, 1, 1]", style=solid]; -"116 Add_116" -> "117 Relu_117" [label="[1, 2048, 1, 1]", style=solid]; -"117 Relu_117" -> "118 Conv_118" [label="[1, 2048, 1, 1]", style=solid]; -"117 Relu_117" -> "123 Add_123" [label="[1, 2048, 1, 1]", style=solid]; -"118 Conv_118" -> "119 Relu_119" [label="[1, 512, 1, 1]", style=solid]; -"119 Relu_119" -> "120 Conv_120" [label="[1, 512, 1, 1]", style=solid]; -"120 Conv_120" -> "121 Relu_121" [label="[1, 512, 1, 1]", style=solid]; -"121 Relu_121" -> "122 Conv_122" [label="[1, 512, 1, 1]", style=solid]; -"122 Conv_122" -> "123 Add_123" [label="[1, 2048, 1, 1]", style=solid]; -"123 Add_123" -> "124 Relu_124" [label="[1, 2048, 1, 1]", style=solid]; -"124 Relu_124" -> "125 Conv_125" [label="[1, 2048, 1, 1]", style=solid]; -"125 Conv_125" -> "126 Relu_126" [label="[1, 512, 1, 1]", style=solid]; -"126 Relu_126" -> "127 Conv_127" [label="[1, 512, 1, 1]", style=solid]; -"127 Conv_127" -> "134 Shape_132" [label="[1, 21, 1, 1]", style=solid]; -"127 Conv_127" -> "142 Resize_140" [label="[1, 21, 1, 1]", style=solid]; +"6 Conv_6" -> "7 Relu_7" [label="[-1, 64, -1, -1]", style=solid]; +"7 Relu_7" -> "8 MaxPool_8" [label="[-1, 64, -1, -1]", style=solid]; +"8 MaxPool_8" -> "9 Conv_9" [label="[-1, 64, -1, -1]", style=solid]; +"8 MaxPool_8" -> "14 Conv_14" [label="[-1, 64, -1, -1]", style=solid]; +"9 Conv_9" -> "10 Relu_10" [label="[-1, 64, -1, -1]", style=solid]; +"10 Relu_10" -> "11 Conv_11" [label="[-1, 64, -1, -1]", style=solid]; +"11 Conv_11" -> "12 Relu_12" [label="[-1, 64, -1, -1]", style=solid]; +"12 Relu_12" -> "13 Conv_13" [label="[-1, 64, -1, -1]", style=solid]; +"13 Conv_13" -> "15 Add_15" [label="[-1, 256, -1, -1]", style=solid]; +"14 Conv_14" -> "15 Add_15" [label="[-1, 256, -1, -1]", style=solid]; +"15 Add_15" -> "16 Relu_16" [label="[-1, 256, -1, -1]", style=solid]; +"16 Relu_16" -> "17 Conv_17" [label="[-1, 256, -1, -1]", style=solid]; +"16 Relu_16" -> "22 Add_22" [label="[-1, 256, -1, -1]", style=solid]; +"17 Conv_17" -> "18 Relu_18" [label="[-1, 64, -1, -1]", style=solid]; +"18 Relu_18" -> "19 Conv_19" [label="[-1, 64, -1, -1]", style=solid]; +"19 Conv_19" -> "20 Relu_20" [label="[-1, 64, -1, -1]", style=solid]; +"20 Relu_20" -> "21 Conv_21" [label="[-1, 64, -1, -1]", style=solid]; +"21 Conv_21" -> "22 Add_22" [label="[-1, 256, -1, -1]", style=solid]; +"22 Add_22" -> "23 Relu_23" [label="[-1, 256, -1, -1]", style=solid]; +"23 Relu_23" -> "24 Conv_24" [label="[-1, 256, -1, -1]", style=solid]; +"23 Relu_23" -> "29 Add_29" [label="[-1, 256, -1, -1]", style=solid]; +"24 Conv_24" -> "25 Relu_25" [label="[-1, 64, -1, -1]", style=solid]; +"25 Relu_25" -> "26 Conv_26" [label="[-1, 64, -1, -1]", style=solid]; +"26 Conv_26" -> "27 Relu_27" [label="[-1, 64, -1, -1]", style=solid]; +"27 Relu_27" -> "28 Conv_28" [label="[-1, 64, -1, -1]", style=solid]; +"28 Conv_28" -> "29 Add_29" [label="[-1, 256, -1, -1]", style=solid]; +"29 Add_29" -> "30 Relu_30" [label="[-1, 256, -1, -1]", style=solid]; +"30 Relu_30" -> "31 Conv_31" [label="[-1, 256, -1, -1]", style=solid]; +"30 Relu_30" -> "36 Conv_36" [label="[-1, 256, -1, -1]", style=solid]; +"31 Conv_31" -> "32 Relu_32" [label="[-1, 128, -1, -1]", style=solid]; +"32 Relu_32" -> "33 Conv_33" [label="[-1, 128, -1, 
-1]", style=solid]; +"33 Conv_33" -> "34 Relu_34" [label="[-1, 128, -1, -1]", style=solid]; +"34 Relu_34" -> "35 Conv_35" [label="[-1, 128, -1, -1]", style=solid]; +"35 Conv_35" -> "37 Add_37" [label="[-1, 512, -1, -1]", style=solid]; +"36 Conv_36" -> "37 Add_37" [label="[-1, 512, -1, -1]", style=solid]; +"37 Add_37" -> "38 Relu_38" [label="[-1, 512, -1, -1]", style=solid]; +"38 Relu_38" -> "39 Conv_39" [label="[-1, 512, -1, -1]", style=solid]; +"38 Relu_38" -> "44 Add_44" [label="[-1, 512, -1, -1]", style=solid]; +"39 Conv_39" -> "40 Relu_40" [label="[-1, 128, -1, -1]", style=solid]; +"40 Relu_40" -> "41 Conv_41" [label="[-1, 128, -1, -1]", style=solid]; +"41 Conv_41" -> "42 Relu_42" [label="[-1, 128, -1, -1]", style=solid]; +"42 Relu_42" -> "43 Conv_43" [label="[-1, 128, -1, -1]", style=solid]; +"43 Conv_43" -> "44 Add_44" [label="[-1, 512, -1, -1]", style=solid]; +"44 Add_44" -> "45 Relu_45" [label="[-1, 512, -1, -1]", style=solid]; +"45 Relu_45" -> "46 Conv_46" [label="[-1, 512, -1, -1]", style=solid]; +"45 Relu_45" -> "51 Add_51" [label="[-1, 512, -1, -1]", style=solid]; +"46 Conv_46" -> "47 Relu_47" [label="[-1, 128, -1, -1]", style=solid]; +"47 Relu_47" -> "48 Conv_48" [label="[-1, 128, -1, -1]", style=solid]; +"48 Conv_48" -> "49 Relu_49" [label="[-1, 128, -1, -1]", style=solid]; +"49 Relu_49" -> "50 Conv_50" [label="[-1, 128, -1, -1]", style=solid]; +"50 Conv_50" -> "51 Add_51" [label="[-1, 512, -1, -1]", style=solid]; +"51 Add_51" -> "52 Relu_52" [label="[-1, 512, -1, -1]", style=solid]; +"52 Relu_52" -> "53 Conv_53" [label="[-1, 512, -1, -1]", style=solid]; +"52 Relu_52" -> "58 Add_58" [label="[-1, 512, -1, -1]", style=solid]; +"53 Conv_53" -> "54 Relu_54" [label="[-1, 128, -1, -1]", style=solid]; +"54 Relu_54" -> "55 Conv_55" [label="[-1, 128, -1, -1]", style=solid]; +"55 Conv_55" -> "56 Relu_56" [label="[-1, 128, -1, -1]", style=solid]; +"56 Relu_56" -> "57 Conv_57" [label="[-1, 128, -1, -1]", style=solid]; +"57 Conv_57" -> "58 Add_58" [label="[-1, 512, -1, -1]", style=solid]; +"58 Add_58" -> "59 Relu_59" [label="[-1, 512, -1, -1]", style=solid]; +"59 Relu_59" -> "60 Conv_60" [label="[-1, 512, -1, -1]", style=solid]; +"59 Relu_59" -> "65 Conv_65" [label="[-1, 512, -1, -1]", style=solid]; +"60 Conv_60" -> "61 Relu_61" [label="[-1, 256, -1, -1]", style=solid]; +"61 Relu_61" -> "62 Conv_62" [label="[-1, 256, -1, -1]", style=solid]; +"62 Conv_62" -> "63 Relu_63" [label="[-1, 256, -1, -1]", style=solid]; +"63 Relu_63" -> "64 Conv_64" [label="[-1, 256, -1, -1]", style=solid]; +"64 Conv_64" -> "66 Add_66" [label="[-1, 1024, -1, -1]", style=solid]; +"65 Conv_65" -> "66 Add_66" [label="[-1, 1024, -1, -1]", style=solid]; +"66 Add_66" -> "67 Relu_67" [label="[-1, 1024, -1, -1]", style=solid]; +"67 Relu_67" -> "68 Conv_68" [label="[-1, 1024, -1, -1]", style=solid]; +"67 Relu_67" -> "73 Add_73" [label="[-1, 1024, -1, -1]", style=solid]; +"68 Conv_68" -> "69 Relu_69" [label="[-1, 256, -1, -1]", style=solid]; +"69 Relu_69" -> "70 Conv_70" [label="[-1, 256, -1, -1]", style=solid]; +"70 Conv_70" -> "71 Relu_71" [label="[-1, 256, -1, -1]", style=solid]; +"71 Relu_71" -> "72 Conv_72" [label="[-1, 256, -1, -1]", style=solid]; +"72 Conv_72" -> "73 Add_73" [label="[-1, 1024, -1, -1]", style=solid]; +"73 Add_73" -> "74 Relu_74" [label="[-1, 1024, -1, -1]", style=solid]; +"74 Relu_74" -> "75 Conv_75" [label="[-1, 1024, -1, -1]", style=solid]; +"74 Relu_74" -> "80 Add_80" [label="[-1, 1024, -1, -1]", style=solid]; +"75 Conv_75" -> "76 Relu_76" [label="[-1, 256, -1, -1]", style=solid]; +"76 Relu_76" -> 
"77 Conv_77" [label="[-1, 256, -1, -1]", style=solid]; +"77 Conv_77" -> "78 Relu_78" [label="[-1, 256, -1, -1]", style=solid]; +"78 Relu_78" -> "79 Conv_79" [label="[-1, 256, -1, -1]", style=solid]; +"79 Conv_79" -> "80 Add_80" [label="[-1, 1024, -1, -1]", style=solid]; +"80 Add_80" -> "81 Relu_81" [label="[-1, 1024, -1, -1]", style=solid]; +"81 Relu_81" -> "82 Conv_82" [label="[-1, 1024, -1, -1]", style=solid]; +"81 Relu_81" -> "87 Add_87" [label="[-1, 1024, -1, -1]", style=solid]; +"82 Conv_82" -> "83 Relu_83" [label="[-1, 256, -1, -1]", style=solid]; +"83 Relu_83" -> "84 Conv_84" [label="[-1, 256, -1, -1]", style=solid]; +"84 Conv_84" -> "85 Relu_85" [label="[-1, 256, -1, -1]", style=solid]; +"85 Relu_85" -> "86 Conv_86" [label="[-1, 256, -1, -1]", style=solid]; +"86 Conv_86" -> "87 Add_87" [label="[-1, 1024, -1, -1]", style=solid]; +"87 Add_87" -> "88 Relu_88" [label="[-1, 1024, -1, -1]", style=solid]; +"88 Relu_88" -> "89 Conv_89" [label="[-1, 1024, -1, -1]", style=solid]; +"88 Relu_88" -> "94 Add_94" [label="[-1, 1024, -1, -1]", style=solid]; +"89 Conv_89" -> "90 Relu_90" [label="[-1, 256, -1, -1]", style=solid]; +"90 Relu_90" -> "91 Conv_91" [label="[-1, 256, -1, -1]", style=solid]; +"91 Conv_91" -> "92 Relu_92" [label="[-1, 256, -1, -1]", style=solid]; +"92 Relu_92" -> "93 Conv_93" [label="[-1, 256, -1, -1]", style=solid]; +"93 Conv_93" -> "94 Add_94" [label="[-1, 1024, -1, -1]", style=solid]; +"94 Add_94" -> "95 Relu_95" [label="[-1, 1024, -1, -1]", style=solid]; +"95 Relu_95" -> "96 Conv_96" [label="[-1, 1024, -1, -1]", style=solid]; +"95 Relu_95" -> "101 Add_101" [label="[-1, 1024, -1, -1]", style=solid]; +"96 Conv_96" -> "97 Relu_97" [label="[-1, 256, -1, -1]", style=solid]; +"97 Relu_97" -> "98 Conv_98" [label="[-1, 256, -1, -1]", style=solid]; +"98 Conv_98" -> "99 Relu_99" [label="[-1, 256, -1, -1]", style=solid]; +"99 Relu_99" -> "100 Conv_100" [label="[-1, 256, -1, -1]", style=solid]; +"100 Conv_100" -> "101 Add_101" [label="[-1, 1024, -1, -1]", style=solid]; +"101 Add_101" -> "102 Relu_102" [label="[-1, 1024, -1, -1]", style=solid]; +"102 Relu_102" -> "103 Conv_103" [label="[-1, 1024, -1, -1]", style=solid]; +"102 Relu_102" -> "108 Conv_108" [label="[-1, 1024, -1, -1]", style=solid]; +"102 Relu_102" -> "143 Conv_141" [label="[-1, 1024, -1, -1]", style=solid]; +"103 Conv_103" -> "104 Relu_104" [label="[-1, 512, -1, -1]", style=solid]; +"104 Relu_104" -> "105 Conv_105" [label="[-1, 512, -1, -1]", style=solid]; +"105 Conv_105" -> "106 Relu_106" [label="[-1, 512, -1, -1]", style=solid]; +"106 Relu_106" -> "107 Conv_107" [label="[-1, 512, -1, -1]", style=solid]; +"107 Conv_107" -> "109 Add_109" [label="[-1, 2048, -1, -1]", style=solid]; +"108 Conv_108" -> "109 Add_109" [label="[-1, 2048, -1, -1]", style=solid]; +"109 Add_109" -> "110 Relu_110" [label="[-1, 2048, -1, -1]", style=solid]; +"110 Relu_110" -> "111 Conv_111" [label="[-1, 2048, -1, -1]", style=solid]; +"110 Relu_110" -> "116 Add_116" [label="[-1, 2048, -1, -1]", style=solid]; +"111 Conv_111" -> "112 Relu_112" [label="[-1, 512, -1, -1]", style=solid]; +"112 Relu_112" -> "113 Conv_113" [label="[-1, 512, -1, -1]", style=solid]; +"113 Conv_113" -> "114 Relu_114" [label="[-1, 512, -1, -1]", style=solid]; +"114 Relu_114" -> "115 Conv_115" [label="[-1, 512, -1, -1]", style=solid]; +"115 Conv_115" -> "116 Add_116" [label="[-1, 2048, -1, -1]", style=solid]; +"116 Add_116" -> "117 Relu_117" [label="[-1, 2048, -1, -1]", style=solid]; +"117 Relu_117" -> "118 Conv_118" [label="[-1, 2048, -1, -1]", style=solid]; +"117 Relu_117" -> 
"123 Add_123" [label="[-1, 2048, -1, -1]", style=solid]; +"118 Conv_118" -> "119 Relu_119" [label="[-1, 512, -1, -1]", style=solid]; +"119 Relu_119" -> "120 Conv_120" [label="[-1, 512, -1, -1]", style=solid]; +"120 Conv_120" -> "121 Relu_121" [label="[-1, 512, -1, -1]", style=solid]; +"121 Relu_121" -> "122 Conv_122" [label="[-1, 512, -1, -1]", style=solid]; +"122 Conv_122" -> "123 Add_123" [label="[-1, 2048, -1, -1]", style=solid]; +"123 Add_123" -> "124 Relu_124" [label="[-1, 2048, -1, -1]", style=solid]; +"124 Relu_124" -> "125 Conv_125" [label="[-1, 2048, -1, -1]", style=solid]; +"125 Conv_125" -> "126 Relu_126" [label="[-1, 512, -1, -1]", style=solid]; +"126 Relu_126" -> "127 Conv_127" [label="[-1, 512, -1, -1]", style=solid]; +"127 Conv_127" -> "134 Shape_132" [label="[-1, 21, -1, -1]", style=solid]; +"127 Conv_127" -> "142 Resize_140" [label="[-1, 21, -1, -1]", style=solid]; "128 Constant_nncf_128" -> "129 Unsqueeze_128" [label="[1]", style=dashed]; "129 Unsqueeze_128" -> "132 Concat_130" [label="[1]", style=dashed]; "130 Constant_nncf_130" -> "131 Unsqueeze_129" [label="[1]", style=dashed]; @@ -325,11 +325,11 @@ strict digraph { "139 Cast_137" -> "140 Concat_138" [label="[2]", style=dashed]; "140 Concat_138" -> "142 Resize_140" [label="[4]", style=dashed]; "141 Constant_139" -> "142 Resize_140" [label="[0]", style=solid]; -"142 Resize_140" -> "162 nncf_model_output_0" [label="[1, 21, 1, 1]", style=solid]; -"143 Conv_141" -> "144 Relu_142" [label="[1, 256, 1, 1]", style=solid]; -"144 Relu_142" -> "145 Conv_143" [label="[1, 256, 1, 1]", style=solid]; -"145 Conv_143" -> "152 Shape_148" [label="[1, 21, 1, 1]", style=solid]; -"145 Conv_143" -> "160 Resize_156" [label="[1, 21, 1, 1]", style=solid]; +"142 Resize_140" -> "162 nncf_model_output_0" [label="[-1, 21, -1, -1]", style=solid]; +"143 Conv_141" -> "144 Relu_142" [label="[-1, 256, -1, -1]", style=solid]; +"144 Relu_142" -> "145 Conv_143" [label="[-1, 256, -1, -1]", style=solid]; +"145 Conv_143" -> "152 Shape_148" [label="[-1, 21, -1, -1]", style=solid]; +"145 Conv_143" -> "160 Resize_156" [label="[-1, 21, -1, -1]", style=solid]; "146 Constant_nncf_146" -> "147 Unsqueeze_144" [label="[1]", style=dashed]; "147 Unsqueeze_144" -> "150 Concat_146" [label="[1]", style=dashed]; "148 Constant_nncf_148" -> "149 Unsqueeze_145" [label="[1]", style=dashed]; @@ -344,8 +344,8 @@ strict digraph { "157 Cast_153" -> "158 Concat_154" [label="[2]", style=dashed]; "158 Concat_154" -> "160 Resize_156" [label="[4]", style=dashed]; "159 Constant_155" -> "160 Resize_156" [label="[0]", style=solid]; -"160 Resize_156" -> "163 nncf_model_output_1" [label="[1, 21, 1, 1]", style=solid]; -"161 nncf_model_input_0" -> "0 Shape_0" [label="[1, 3, 1, 1]", style=solid]; -"161 nncf_model_input_0" -> "3 Shape_3" [label="[1, 3, 1, 1]", style=solid]; -"161 nncf_model_input_0" -> "6 Conv_6" [label="[1, 3, 1, 1]", style=solid]; +"160 Resize_156" -> "163 nncf_model_output_1" [label="[-1, 21, -1, -1]", style=solid]; +"161 nncf_model_input_0" -> "0 Shape_0" [label="[-1, 3, -1, -1]", style=solid]; +"161 nncf_model_input_0" -> "3 Shape_3" [label="[-1, 3, -1, -1]", style=solid]; +"161 nncf_model_input_0" -> "6 Conv_6" [label="[-1, 3, -1, -1]", style=solid]; } diff --git a/tests/onnx/data/reference_graphs/original_nncf_graph/shufflenet_v2_x1_0.dot b/tests/onnx/data/reference_graphs/original_nncf_graph/shufflenet_v2_x1_0.dot index 3ffac2720ec..63775ac495e 100644 --- a/tests/onnx/data/reference_graphs/original_nncf_graph/shufflenet_v2_x1_0.dot +++ 
b/tests/onnx/data/reference_graphs/original_nncf_graph/shufflenet_v2_x1_0.dot @@ -409,16 +409,16 @@ strict digraph { "25 /stage2/stage2.1/Constant_4" -> "26 /stage2/stage2.1/Mul" [label="[1]", style=dashed]; "26 /stage2/stage2.1/Mul" -> "27 /stage2/stage2.1/Slice" [label="[1]", style=dashed]; "26 /stage2/stage2.1/Mul" -> "30 /stage2/stage2.1/Slice_1" [label="[1]", style=dashed]; -"27 /stage2/stage2.1/Slice" -> "36 /stage2/stage2.1/Concat" [label="[]", style=solid]; +"27 /stage2/stage2.1/Slice" -> "36 /stage2/stage2.1/Concat" [label="[-1, -1, -1, -1]", style=solid]; "28 /stage2/stage2.1/Constant_5" -> "29 /stage2/stage2.1/Mul_1" [label="[1]", style=dashed]; "29 /stage2/stage2.1/Mul_1" -> "30 /stage2/stage2.1/Slice_1" [label="[1]", style=dashed]; -"30 /stage2/stage2.1/Slice_1" -> "31 /stage2/stage2.1/branch2/branch2.0/Conv" [label="[]", style=solid]; -"31 /stage2/stage2.1/branch2/branch2.0/Conv" -> "32 /stage2/stage2.1/branch2/branch2.2/Relu" [label="[]", style=solid]; -"32 /stage2/stage2.1/branch2/branch2.2/Relu" -> "33 /stage2/stage2.1/branch2/branch2.3/Conv" [label="[]", style=solid]; -"33 /stage2/stage2.1/branch2/branch2.3/Conv" -> "34 /stage2/stage2.1/branch2/branch2.5/Conv" [label="[]", style=solid]; -"34 /stage2/stage2.1/branch2/branch2.5/Conv" -> "35 /stage2/stage2.1/branch2/branch2.7/Relu" [label="[]", style=solid]; -"35 /stage2/stage2.1/branch2/branch2.7/Relu" -> "36 /stage2/stage2.1/Concat" [label="[]", style=solid]; -"36 /stage2/stage2.1/Concat" -> "38 /stage2/stage2.1/Reshape" [label="[]", style=solid]; +"30 /stage2/stage2.1/Slice_1" -> "31 /stage2/stage2.1/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; +"31 /stage2/stage2.1/branch2/branch2.0/Conv" -> "32 /stage2/stage2.1/branch2/branch2.2/Relu" [label="[-1, 58, -1, -1]", style=solid]; +"32 /stage2/stage2.1/branch2/branch2.2/Relu" -> "33 /stage2/stage2.1/branch2/branch2.3/Conv" [label="[-1, 58, -1, -1]", style=solid]; +"33 /stage2/stage2.1/branch2/branch2.3/Conv" -> "34 /stage2/stage2.1/branch2/branch2.5/Conv" [label="[-1, 58, -1, -1]", style=solid]; +"34 /stage2/stage2.1/branch2/branch2.5/Conv" -> "35 /stage2/stage2.1/branch2/branch2.7/Relu" [label="[-1, 58, -1, -1]", style=solid]; +"35 /stage2/stage2.1/branch2/branch2.7/Relu" -> "36 /stage2/stage2.1/Concat" [label="[-1, 58, -1, -1]", style=solid]; +"36 /stage2/stage2.1/Concat" -> "38 /stage2/stage2.1/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "37 /stage2/stage2.1/Constant_6" -> "38 /stage2/stage2.1/Reshape" [label="[5]", style=dashed]; "38 /stage2/stage2.1/Reshape" -> "39 /stage2/stage2.1/Transpose" [label="[1, 2, 58, 28, 28]", style=solid]; "39 /stage2/stage2.1/Transpose" -> "41 /stage2/stage2.1/Reshape_1" [label="[1, 58, 2, 28, 28]", style=solid]; @@ -440,16 +440,16 @@ strict digraph { "50 /stage2/stage2.2/Constant_4" -> "51 /stage2/stage2.2/Mul" [label="[1]", style=dashed]; "51 /stage2/stage2.2/Mul" -> "52 /stage2/stage2.2/Slice" [label="[1]", style=dashed]; "51 /stage2/stage2.2/Mul" -> "55 /stage2/stage2.2/Slice_1" [label="[1]", style=dashed]; -"52 /stage2/stage2.2/Slice" -> "61 /stage2/stage2.2/Concat" [label="[]", style=solid]; +"52 /stage2/stage2.2/Slice" -> "61 /stage2/stage2.2/Concat" [label="[-1, -1, -1, -1]", style=solid]; "53 /stage2/stage2.2/Constant_5" -> "54 /stage2/stage2.2/Mul_1" [label="[1]", style=dashed]; "54 /stage2/stage2.2/Mul_1" -> "55 /stage2/stage2.2/Slice_1" [label="[1]", style=dashed]; -"55 /stage2/stage2.2/Slice_1" -> "56 /stage2/stage2.2/branch2/branch2.0/Conv" [label="[]", style=solid]; -"56 
/stage2/stage2.2/branch2/branch2.0/Conv" -> "57 /stage2/stage2.2/branch2/branch2.2/Relu" [label="[]", style=solid]; -"57 /stage2/stage2.2/branch2/branch2.2/Relu" -> "58 /stage2/stage2.2/branch2/branch2.3/Conv" [label="[]", style=solid]; -"58 /stage2/stage2.2/branch2/branch2.3/Conv" -> "59 /stage2/stage2.2/branch2/branch2.5/Conv" [label="[]", style=solid]; -"59 /stage2/stage2.2/branch2/branch2.5/Conv" -> "60 /stage2/stage2.2/branch2/branch2.7/Relu" [label="[]", style=solid]; -"60 /stage2/stage2.2/branch2/branch2.7/Relu" -> "61 /stage2/stage2.2/Concat" [label="[]", style=solid]; -"61 /stage2/stage2.2/Concat" -> "63 /stage2/stage2.2/Reshape" [label="[]", style=solid]; +"55 /stage2/stage2.2/Slice_1" -> "56 /stage2/stage2.2/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; +"56 /stage2/stage2.2/branch2/branch2.0/Conv" -> "57 /stage2/stage2.2/branch2/branch2.2/Relu" [label="[-1, 58, -1, -1]", style=solid]; +"57 /stage2/stage2.2/branch2/branch2.2/Relu" -> "58 /stage2/stage2.2/branch2/branch2.3/Conv" [label="[-1, 58, -1, -1]", style=solid]; +"58 /stage2/stage2.2/branch2/branch2.3/Conv" -> "59 /stage2/stage2.2/branch2/branch2.5/Conv" [label="[-1, 58, -1, -1]", style=solid]; +"59 /stage2/stage2.2/branch2/branch2.5/Conv" -> "60 /stage2/stage2.2/branch2/branch2.7/Relu" [label="[-1, 58, -1, -1]", style=solid]; +"60 /stage2/stage2.2/branch2/branch2.7/Relu" -> "61 /stage2/stage2.2/Concat" [label="[-1, 58, -1, -1]", style=solid]; +"61 /stage2/stage2.2/Concat" -> "63 /stage2/stage2.2/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "62 /stage2/stage2.2/Constant_6" -> "63 /stage2/stage2.2/Reshape" [label="[5]", style=dashed]; "63 /stage2/stage2.2/Reshape" -> "64 /stage2/stage2.2/Transpose" [label="[1, 2, 58, 28, 28]", style=solid]; "64 /stage2/stage2.2/Transpose" -> "66 /stage2/stage2.2/Reshape_1" [label="[1, 58, 2, 28, 28]", style=solid]; @@ -471,16 +471,16 @@ strict digraph { "75 /stage2/stage2.3/Constant_4" -> "76 /stage2/stage2.3/Mul" [label="[1]", style=dashed]; "76 /stage2/stage2.3/Mul" -> "77 /stage2/stage2.3/Slice" [label="[1]", style=dashed]; "76 /stage2/stage2.3/Mul" -> "80 /stage2/stage2.3/Slice_1" [label="[1]", style=dashed]; -"77 /stage2/stage2.3/Slice" -> "86 /stage2/stage2.3/Concat" [label="[]", style=solid]; +"77 /stage2/stage2.3/Slice" -> "86 /stage2/stage2.3/Concat" [label="[-1, -1, -1, -1]", style=solid]; "78 /stage2/stage2.3/Constant_5" -> "79 /stage2/stage2.3/Mul_1" [label="[1]", style=dashed]; "79 /stage2/stage2.3/Mul_1" -> "80 /stage2/stage2.3/Slice_1" [label="[1]", style=dashed]; -"80 /stage2/stage2.3/Slice_1" -> "81 /stage2/stage2.3/branch2/branch2.0/Conv" [label="[]", style=solid]; -"81 /stage2/stage2.3/branch2/branch2.0/Conv" -> "82 /stage2/stage2.3/branch2/branch2.2/Relu" [label="[]", style=solid]; -"82 /stage2/stage2.3/branch2/branch2.2/Relu" -> "83 /stage2/stage2.3/branch2/branch2.3/Conv" [label="[]", style=solid]; -"83 /stage2/stage2.3/branch2/branch2.3/Conv" -> "84 /stage2/stage2.3/branch2/branch2.5/Conv" [label="[]", style=solid]; -"84 /stage2/stage2.3/branch2/branch2.5/Conv" -> "85 /stage2/stage2.3/branch2/branch2.7/Relu" [label="[]", style=solid]; -"85 /stage2/stage2.3/branch2/branch2.7/Relu" -> "86 /stage2/stage2.3/Concat" [label="[]", style=solid]; -"86 /stage2/stage2.3/Concat" -> "88 /stage2/stage2.3/Reshape" [label="[]", style=solid]; +"80 /stage2/stage2.3/Slice_1" -> "81 /stage2/stage2.3/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; +"81 /stage2/stage2.3/branch2/branch2.0/Conv" -> "82 /stage2/stage2.3/branch2/branch2.2/Relu" 
[label="[-1, 58, -1, -1]", style=solid]; +"82 /stage2/stage2.3/branch2/branch2.2/Relu" -> "83 /stage2/stage2.3/branch2/branch2.3/Conv" [label="[-1, 58, -1, -1]", style=solid]; +"83 /stage2/stage2.3/branch2/branch2.3/Conv" -> "84 /stage2/stage2.3/branch2/branch2.5/Conv" [label="[-1, 58, -1, -1]", style=solid]; +"84 /stage2/stage2.3/branch2/branch2.5/Conv" -> "85 /stage2/stage2.3/branch2/branch2.7/Relu" [label="[-1, 58, -1, -1]", style=solid]; +"85 /stage2/stage2.3/branch2/branch2.7/Relu" -> "86 /stage2/stage2.3/Concat" [label="[-1, 58, -1, -1]", style=solid]; +"86 /stage2/stage2.3/Concat" -> "88 /stage2/stage2.3/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "87 /stage2/stage2.3/Constant_6" -> "88 /stage2/stage2.3/Reshape" [label="[5]", style=dashed]; "88 /stage2/stage2.3/Reshape" -> "89 /stage2/stage2.3/Transpose" [label="[1, 2, 58, 28, 28]", style=solid]; "89 /stage2/stage2.3/Transpose" -> "91 /stage2/stage2.3/Reshape_1" [label="[1, 58, 2, 28, 28]", style=solid]; @@ -517,16 +517,16 @@ strict digraph { "114 /stage3/stage3.1/Constant_4" -> "115 /stage3/stage3.1/Mul" [label="[1]", style=dashed]; "115 /stage3/stage3.1/Mul" -> "116 /stage3/stage3.1/Slice" [label="[1]", style=dashed]; "115 /stage3/stage3.1/Mul" -> "119 /stage3/stage3.1/Slice_1" [label="[1]", style=dashed]; -"116 /stage3/stage3.1/Slice" -> "125 /stage3/stage3.1/Concat" [label="[]", style=solid]; +"116 /stage3/stage3.1/Slice" -> "125 /stage3/stage3.1/Concat" [label="[-1, -1, -1, -1]", style=solid]; "117 /stage3/stage3.1/Constant_5" -> "118 /stage3/stage3.1/Mul_1" [label="[1]", style=dashed]; "118 /stage3/stage3.1/Mul_1" -> "119 /stage3/stage3.1/Slice_1" [label="[1]", style=dashed]; -"119 /stage3/stage3.1/Slice_1" -> "120 /stage3/stage3.1/branch2/branch2.0/Conv" [label="[]", style=solid]; -"120 /stage3/stage3.1/branch2/branch2.0/Conv" -> "121 /stage3/stage3.1/branch2/branch2.2/Relu" [label="[]", style=solid]; -"121 /stage3/stage3.1/branch2/branch2.2/Relu" -> "122 /stage3/stage3.1/branch2/branch2.3/Conv" [label="[]", style=solid]; -"122 /stage3/stage3.1/branch2/branch2.3/Conv" -> "123 /stage3/stage3.1/branch2/branch2.5/Conv" [label="[]", style=solid]; -"123 /stage3/stage3.1/branch2/branch2.5/Conv" -> "124 /stage3/stage3.1/branch2/branch2.7/Relu" [label="[]", style=solid]; -"124 /stage3/stage3.1/branch2/branch2.7/Relu" -> "125 /stage3/stage3.1/Concat" [label="[]", style=solid]; -"125 /stage3/stage3.1/Concat" -> "127 /stage3/stage3.1/Reshape" [label="[]", style=solid]; +"119 /stage3/stage3.1/Slice_1" -> "120 /stage3/stage3.1/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; +"120 /stage3/stage3.1/branch2/branch2.0/Conv" -> "121 /stage3/stage3.1/branch2/branch2.2/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"121 /stage3/stage3.1/branch2/branch2.2/Relu" -> "122 /stage3/stage3.1/branch2/branch2.3/Conv" [label="[-1, 116, -1, -1]", style=solid]; +"122 /stage3/stage3.1/branch2/branch2.3/Conv" -> "123 /stage3/stage3.1/branch2/branch2.5/Conv" [label="[-1, 116, -1, -1]", style=solid]; +"123 /stage3/stage3.1/branch2/branch2.5/Conv" -> "124 /stage3/stage3.1/branch2/branch2.7/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"124 /stage3/stage3.1/branch2/branch2.7/Relu" -> "125 /stage3/stage3.1/Concat" [label="[-1, 116, -1, -1]", style=solid]; +"125 /stage3/stage3.1/Concat" -> "127 /stage3/stage3.1/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "126 /stage3/stage3.1/Constant_6" -> "127 /stage3/stage3.1/Reshape" [label="[5]", style=dashed]; "127 /stage3/stage3.1/Reshape" -> "128 /stage3/stage3.1/Transpose" [label="[1, 2, 
116, 14, 14]", style=solid]; "128 /stage3/stage3.1/Transpose" -> "130 /stage3/stage3.1/Reshape_1" [label="[1, 116, 2, 14, 14]", style=solid]; @@ -548,16 +548,16 @@ strict digraph { "139 /stage3/stage3.2/Constant_4" -> "140 /stage3/stage3.2/Mul" [label="[1]", style=dashed]; "140 /stage3/stage3.2/Mul" -> "141 /stage3/stage3.2/Slice" [label="[1]", style=dashed]; "140 /stage3/stage3.2/Mul" -> "144 /stage3/stage3.2/Slice_1" [label="[1]", style=dashed]; -"141 /stage3/stage3.2/Slice" -> "150 /stage3/stage3.2/Concat" [label="[]", style=solid]; +"141 /stage3/stage3.2/Slice" -> "150 /stage3/stage3.2/Concat" [label="[-1, -1, -1, -1]", style=solid]; "142 /stage3/stage3.2/Constant_5" -> "143 /stage3/stage3.2/Mul_1" [label="[1]", style=dashed]; "143 /stage3/stage3.2/Mul_1" -> "144 /stage3/stage3.2/Slice_1" [label="[1]", style=dashed]; -"144 /stage3/stage3.2/Slice_1" -> "145 /stage3/stage3.2/branch2/branch2.0/Conv" [label="[]", style=solid]; -"145 /stage3/stage3.2/branch2/branch2.0/Conv" -> "146 /stage3/stage3.2/branch2/branch2.2/Relu" [label="[]", style=solid]; -"146 /stage3/stage3.2/branch2/branch2.2/Relu" -> "147 /stage3/stage3.2/branch2/branch2.3/Conv" [label="[]", style=solid]; -"147 /stage3/stage3.2/branch2/branch2.3/Conv" -> "148 /stage3/stage3.2/branch2/branch2.5/Conv" [label="[]", style=solid]; -"148 /stage3/stage3.2/branch2/branch2.5/Conv" -> "149 /stage3/stage3.2/branch2/branch2.7/Relu" [label="[]", style=solid]; -"149 /stage3/stage3.2/branch2/branch2.7/Relu" -> "150 /stage3/stage3.2/Concat" [label="[]", style=solid]; -"150 /stage3/stage3.2/Concat" -> "152 /stage3/stage3.2/Reshape" [label="[]", style=solid]; +"144 /stage3/stage3.2/Slice_1" -> "145 /stage3/stage3.2/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; +"145 /stage3/stage3.2/branch2/branch2.0/Conv" -> "146 /stage3/stage3.2/branch2/branch2.2/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"146 /stage3/stage3.2/branch2/branch2.2/Relu" -> "147 /stage3/stage3.2/branch2/branch2.3/Conv" [label="[-1, 116, -1, -1]", style=solid]; +"147 /stage3/stage3.2/branch2/branch2.3/Conv" -> "148 /stage3/stage3.2/branch2/branch2.5/Conv" [label="[-1, 116, -1, -1]", style=solid]; +"148 /stage3/stage3.2/branch2/branch2.5/Conv" -> "149 /stage3/stage3.2/branch2/branch2.7/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"149 /stage3/stage3.2/branch2/branch2.7/Relu" -> "150 /stage3/stage3.2/Concat" [label="[-1, 116, -1, -1]", style=solid]; +"150 /stage3/stage3.2/Concat" -> "152 /stage3/stage3.2/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "151 /stage3/stage3.2/Constant_6" -> "152 /stage3/stage3.2/Reshape" [label="[5]", style=dashed]; "152 /stage3/stage3.2/Reshape" -> "153 /stage3/stage3.2/Transpose" [label="[1, 2, 116, 14, 14]", style=solid]; "153 /stage3/stage3.2/Transpose" -> "155 /stage3/stage3.2/Reshape_1" [label="[1, 116, 2, 14, 14]", style=solid]; @@ -579,16 +579,16 @@ strict digraph { "164 /stage3/stage3.3/Constant_4" -> "165 /stage3/stage3.3/Mul" [label="[1]", style=dashed]; "165 /stage3/stage3.3/Mul" -> "166 /stage3/stage3.3/Slice" [label="[1]", style=dashed]; "165 /stage3/stage3.3/Mul" -> "169 /stage3/stage3.3/Slice_1" [label="[1]", style=dashed]; -"166 /stage3/stage3.3/Slice" -> "175 /stage3/stage3.3/Concat" [label="[]", style=solid]; +"166 /stage3/stage3.3/Slice" -> "175 /stage3/stage3.3/Concat" [label="[-1, -1, -1, -1]", style=solid]; "167 /stage3/stage3.3/Constant_5" -> "168 /stage3/stage3.3/Mul_1" [label="[1]", style=dashed]; "168 /stage3/stage3.3/Mul_1" -> "169 /stage3/stage3.3/Slice_1" [label="[1]", style=dashed]; -"169 
/stage3/stage3.3/Slice_1" -> "170 /stage3/stage3.3/branch2/branch2.0/Conv" [label="[]", style=solid]; -"170 /stage3/stage3.3/branch2/branch2.0/Conv" -> "171 /stage3/stage3.3/branch2/branch2.2/Relu" [label="[]", style=solid]; -"171 /stage3/stage3.3/branch2/branch2.2/Relu" -> "172 /stage3/stage3.3/branch2/branch2.3/Conv" [label="[]", style=solid]; -"172 /stage3/stage3.3/branch2/branch2.3/Conv" -> "173 /stage3/stage3.3/branch2/branch2.5/Conv" [label="[]", style=solid]; -"173 /stage3/stage3.3/branch2/branch2.5/Conv" -> "174 /stage3/stage3.3/branch2/branch2.7/Relu" [label="[]", style=solid]; -"174 /stage3/stage3.3/branch2/branch2.7/Relu" -> "175 /stage3/stage3.3/Concat" [label="[]", style=solid]; -"175 /stage3/stage3.3/Concat" -> "177 /stage3/stage3.3/Reshape" [label="[]", style=solid]; +"169 /stage3/stage3.3/Slice_1" -> "170 /stage3/stage3.3/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; +"170 /stage3/stage3.3/branch2/branch2.0/Conv" -> "171 /stage3/stage3.3/branch2/branch2.2/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"171 /stage3/stage3.3/branch2/branch2.2/Relu" -> "172 /stage3/stage3.3/branch2/branch2.3/Conv" [label="[-1, 116, -1, -1]", style=solid]; +"172 /stage3/stage3.3/branch2/branch2.3/Conv" -> "173 /stage3/stage3.3/branch2/branch2.5/Conv" [label="[-1, 116, -1, -1]", style=solid]; +"173 /stage3/stage3.3/branch2/branch2.5/Conv" -> "174 /stage3/stage3.3/branch2/branch2.7/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"174 /stage3/stage3.3/branch2/branch2.7/Relu" -> "175 /stage3/stage3.3/Concat" [label="[-1, 116, -1, -1]", style=solid]; +"175 /stage3/stage3.3/Concat" -> "177 /stage3/stage3.3/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "176 /stage3/stage3.3/Constant_6" -> "177 /stage3/stage3.3/Reshape" [label="[5]", style=dashed]; "177 /stage3/stage3.3/Reshape" -> "178 /stage3/stage3.3/Transpose" [label="[1, 2, 116, 14, 14]", style=solid]; "178 /stage3/stage3.3/Transpose" -> "180 /stage3/stage3.3/Reshape_1" [label="[1, 116, 2, 14, 14]", style=solid]; @@ -610,16 +610,16 @@ strict digraph { "189 /stage3/stage3.4/Constant_4" -> "190 /stage3/stage3.4/Mul" [label="[1]", style=dashed]; "190 /stage3/stage3.4/Mul" -> "191 /stage3/stage3.4/Slice" [label="[1]", style=dashed]; "190 /stage3/stage3.4/Mul" -> "194 /stage3/stage3.4/Slice_1" [label="[1]", style=dashed]; -"191 /stage3/stage3.4/Slice" -> "200 /stage3/stage3.4/Concat" [label="[]", style=solid]; +"191 /stage3/stage3.4/Slice" -> "200 /stage3/stage3.4/Concat" [label="[-1, -1, -1, -1]", style=solid]; "192 /stage3/stage3.4/Constant_5" -> "193 /stage3/stage3.4/Mul_1" [label="[1]", style=dashed]; "193 /stage3/stage3.4/Mul_1" -> "194 /stage3/stage3.4/Slice_1" [label="[1]", style=dashed]; -"194 /stage3/stage3.4/Slice_1" -> "195 /stage3/stage3.4/branch2/branch2.0/Conv" [label="[]", style=solid]; -"195 /stage3/stage3.4/branch2/branch2.0/Conv" -> "196 /stage3/stage3.4/branch2/branch2.2/Relu" [label="[]", style=solid]; -"196 /stage3/stage3.4/branch2/branch2.2/Relu" -> "197 /stage3/stage3.4/branch2/branch2.3/Conv" [label="[]", style=solid]; -"197 /stage3/stage3.4/branch2/branch2.3/Conv" -> "198 /stage3/stage3.4/branch2/branch2.5/Conv" [label="[]", style=solid]; -"198 /stage3/stage3.4/branch2/branch2.5/Conv" -> "199 /stage3/stage3.4/branch2/branch2.7/Relu" [label="[]", style=solid]; -"199 /stage3/stage3.4/branch2/branch2.7/Relu" -> "200 /stage3/stage3.4/Concat" [label="[]", style=solid]; -"200 /stage3/stage3.4/Concat" -> "202 /stage3/stage3.4/Reshape" [label="[]", style=solid]; +"194 /stage3/stage3.4/Slice_1" -> "195 
/stage3/stage3.4/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; +"195 /stage3/stage3.4/branch2/branch2.0/Conv" -> "196 /stage3/stage3.4/branch2/branch2.2/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"196 /stage3/stage3.4/branch2/branch2.2/Relu" -> "197 /stage3/stage3.4/branch2/branch2.3/Conv" [label="[-1, 116, -1, -1]", style=solid]; +"197 /stage3/stage3.4/branch2/branch2.3/Conv" -> "198 /stage3/stage3.4/branch2/branch2.5/Conv" [label="[-1, 116, -1, -1]", style=solid]; +"198 /stage3/stage3.4/branch2/branch2.5/Conv" -> "199 /stage3/stage3.4/branch2/branch2.7/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"199 /stage3/stage3.4/branch2/branch2.7/Relu" -> "200 /stage3/stage3.4/Concat" [label="[-1, 116, -1, -1]", style=solid]; +"200 /stage3/stage3.4/Concat" -> "202 /stage3/stage3.4/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "201 /stage3/stage3.4/Constant_6" -> "202 /stage3/stage3.4/Reshape" [label="[5]", style=dashed]; "202 /stage3/stage3.4/Reshape" -> "203 /stage3/stage3.4/Transpose" [label="[1, 2, 116, 14, 14]", style=solid]; "203 /stage3/stage3.4/Transpose" -> "205 /stage3/stage3.4/Reshape_1" [label="[1, 116, 2, 14, 14]", style=solid]; @@ -641,16 +641,16 @@ strict digraph { "214 /stage3/stage3.5/Constant_4" -> "215 /stage3/stage3.5/Mul" [label="[1]", style=dashed]; "215 /stage3/stage3.5/Mul" -> "216 /stage3/stage3.5/Slice" [label="[1]", style=dashed]; "215 /stage3/stage3.5/Mul" -> "219 /stage3/stage3.5/Slice_1" [label="[1]", style=dashed]; -"216 /stage3/stage3.5/Slice" -> "225 /stage3/stage3.5/Concat" [label="[]", style=solid]; +"216 /stage3/stage3.5/Slice" -> "225 /stage3/stage3.5/Concat" [label="[-1, -1, -1, -1]", style=solid]; "217 /stage3/stage3.5/Constant_5" -> "218 /stage3/stage3.5/Mul_1" [label="[1]", style=dashed]; "218 /stage3/stage3.5/Mul_1" -> "219 /stage3/stage3.5/Slice_1" [label="[1]", style=dashed]; -"219 /stage3/stage3.5/Slice_1" -> "220 /stage3/stage3.5/branch2/branch2.0/Conv" [label="[]", style=solid]; -"220 /stage3/stage3.5/branch2/branch2.0/Conv" -> "221 /stage3/stage3.5/branch2/branch2.2/Relu" [label="[]", style=solid]; -"221 /stage3/stage3.5/branch2/branch2.2/Relu" -> "222 /stage3/stage3.5/branch2/branch2.3/Conv" [label="[]", style=solid]; -"222 /stage3/stage3.5/branch2/branch2.3/Conv" -> "223 /stage3/stage3.5/branch2/branch2.5/Conv" [label="[]", style=solid]; -"223 /stage3/stage3.5/branch2/branch2.5/Conv" -> "224 /stage3/stage3.5/branch2/branch2.7/Relu" [label="[]", style=solid]; -"224 /stage3/stage3.5/branch2/branch2.7/Relu" -> "225 /stage3/stage3.5/Concat" [label="[]", style=solid]; -"225 /stage3/stage3.5/Concat" -> "227 /stage3/stage3.5/Reshape" [label="[]", style=solid]; +"219 /stage3/stage3.5/Slice_1" -> "220 /stage3/stage3.5/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; +"220 /stage3/stage3.5/branch2/branch2.0/Conv" -> "221 /stage3/stage3.5/branch2/branch2.2/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"221 /stage3/stage3.5/branch2/branch2.2/Relu" -> "222 /stage3/stage3.5/branch2/branch2.3/Conv" [label="[-1, 116, -1, -1]", style=solid]; +"222 /stage3/stage3.5/branch2/branch2.3/Conv" -> "223 /stage3/stage3.5/branch2/branch2.5/Conv" [label="[-1, 116, -1, -1]", style=solid]; +"223 /stage3/stage3.5/branch2/branch2.5/Conv" -> "224 /stage3/stage3.5/branch2/branch2.7/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"224 /stage3/stage3.5/branch2/branch2.7/Relu" -> "225 /stage3/stage3.5/Concat" [label="[-1, 116, -1, -1]", style=solid]; +"225 /stage3/stage3.5/Concat" -> "227 /stage3/stage3.5/Reshape" [label="[-1, -1, -1, 
-1]", style=solid]; "226 /stage3/stage3.5/Constant_6" -> "227 /stage3/stage3.5/Reshape" [label="[5]", style=dashed]; "227 /stage3/stage3.5/Reshape" -> "228 /stage3/stage3.5/Transpose" [label="[1, 2, 116, 14, 14]", style=solid]; "228 /stage3/stage3.5/Transpose" -> "230 /stage3/stage3.5/Reshape_1" [label="[1, 116, 2, 14, 14]", style=solid]; @@ -672,16 +672,16 @@ strict digraph { "239 /stage3/stage3.6/Constant_4" -> "240 /stage3/stage3.6/Mul" [label="[1]", style=dashed]; "240 /stage3/stage3.6/Mul" -> "241 /stage3/stage3.6/Slice" [label="[1]", style=dashed]; "240 /stage3/stage3.6/Mul" -> "244 /stage3/stage3.6/Slice_1" [label="[1]", style=dashed]; -"241 /stage3/stage3.6/Slice" -> "250 /stage3/stage3.6/Concat" [label="[]", style=solid]; +"241 /stage3/stage3.6/Slice" -> "250 /stage3/stage3.6/Concat" [label="[-1, -1, -1, -1]", style=solid]; "242 /stage3/stage3.6/Constant_5" -> "243 /stage3/stage3.6/Mul_1" [label="[1]", style=dashed]; "243 /stage3/stage3.6/Mul_1" -> "244 /stage3/stage3.6/Slice_1" [label="[1]", style=dashed]; -"244 /stage3/stage3.6/Slice_1" -> "245 /stage3/stage3.6/branch2/branch2.0/Conv" [label="[]", style=solid]; -"245 /stage3/stage3.6/branch2/branch2.0/Conv" -> "246 /stage3/stage3.6/branch2/branch2.2/Relu" [label="[]", style=solid]; -"246 /stage3/stage3.6/branch2/branch2.2/Relu" -> "247 /stage3/stage3.6/branch2/branch2.3/Conv" [label="[]", style=solid]; -"247 /stage3/stage3.6/branch2/branch2.3/Conv" -> "248 /stage3/stage3.6/branch2/branch2.5/Conv" [label="[]", style=solid]; -"248 /stage3/stage3.6/branch2/branch2.5/Conv" -> "249 /stage3/stage3.6/branch2/branch2.7/Relu" [label="[]", style=solid]; -"249 /stage3/stage3.6/branch2/branch2.7/Relu" -> "250 /stage3/stage3.6/Concat" [label="[]", style=solid]; -"250 /stage3/stage3.6/Concat" -> "252 /stage3/stage3.6/Reshape" [label="[]", style=solid]; +"244 /stage3/stage3.6/Slice_1" -> "245 /stage3/stage3.6/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; +"245 /stage3/stage3.6/branch2/branch2.0/Conv" -> "246 /stage3/stage3.6/branch2/branch2.2/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"246 /stage3/stage3.6/branch2/branch2.2/Relu" -> "247 /stage3/stage3.6/branch2/branch2.3/Conv" [label="[-1, 116, -1, -1]", style=solid]; +"247 /stage3/stage3.6/branch2/branch2.3/Conv" -> "248 /stage3/stage3.6/branch2/branch2.5/Conv" [label="[-1, 116, -1, -1]", style=solid]; +"248 /stage3/stage3.6/branch2/branch2.5/Conv" -> "249 /stage3/stage3.6/branch2/branch2.7/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"249 /stage3/stage3.6/branch2/branch2.7/Relu" -> "250 /stage3/stage3.6/Concat" [label="[-1, 116, -1, -1]", style=solid]; +"250 /stage3/stage3.6/Concat" -> "252 /stage3/stage3.6/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "251 /stage3/stage3.6/Constant_6" -> "252 /stage3/stage3.6/Reshape" [label="[5]", style=dashed]; "252 /stage3/stage3.6/Reshape" -> "253 /stage3/stage3.6/Transpose" [label="[1, 2, 116, 14, 14]", style=solid]; "253 /stage3/stage3.6/Transpose" -> "255 /stage3/stage3.6/Reshape_1" [label="[1, 116, 2, 14, 14]", style=solid]; @@ -703,16 +703,16 @@ strict digraph { "264 /stage3/stage3.7/Constant_4" -> "265 /stage3/stage3.7/Mul" [label="[1]", style=dashed]; "265 /stage3/stage3.7/Mul" -> "266 /stage3/stage3.7/Slice" [label="[1]", style=dashed]; "265 /stage3/stage3.7/Mul" -> "269 /stage3/stage3.7/Slice_1" [label="[1]", style=dashed]; -"266 /stage3/stage3.7/Slice" -> "275 /stage3/stage3.7/Concat" [label="[]", style=solid]; +"266 /stage3/stage3.7/Slice" -> "275 /stage3/stage3.7/Concat" [label="[-1, -1, -1, -1]", 
style=solid]; "267 /stage3/stage3.7/Constant_5" -> "268 /stage3/stage3.7/Mul_1" [label="[1]", style=dashed]; "268 /stage3/stage3.7/Mul_1" -> "269 /stage3/stage3.7/Slice_1" [label="[1]", style=dashed]; -"269 /stage3/stage3.7/Slice_1" -> "270 /stage3/stage3.7/branch2/branch2.0/Conv" [label="[]", style=solid]; -"270 /stage3/stage3.7/branch2/branch2.0/Conv" -> "271 /stage3/stage3.7/branch2/branch2.2/Relu" [label="[]", style=solid]; -"271 /stage3/stage3.7/branch2/branch2.2/Relu" -> "272 /stage3/stage3.7/branch2/branch2.3/Conv" [label="[]", style=solid]; -"272 /stage3/stage3.7/branch2/branch2.3/Conv" -> "273 /stage3/stage3.7/branch2/branch2.5/Conv" [label="[]", style=solid]; -"273 /stage3/stage3.7/branch2/branch2.5/Conv" -> "274 /stage3/stage3.7/branch2/branch2.7/Relu" [label="[]", style=solid]; -"274 /stage3/stage3.7/branch2/branch2.7/Relu" -> "275 /stage3/stage3.7/Concat" [label="[]", style=solid]; -"275 /stage3/stage3.7/Concat" -> "277 /stage3/stage3.7/Reshape" [label="[]", style=solid]; +"269 /stage3/stage3.7/Slice_1" -> "270 /stage3/stage3.7/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; +"270 /stage3/stage3.7/branch2/branch2.0/Conv" -> "271 /stage3/stage3.7/branch2/branch2.2/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"271 /stage3/stage3.7/branch2/branch2.2/Relu" -> "272 /stage3/stage3.7/branch2/branch2.3/Conv" [label="[-1, 116, -1, -1]", style=solid]; +"272 /stage3/stage3.7/branch2/branch2.3/Conv" -> "273 /stage3/stage3.7/branch2/branch2.5/Conv" [label="[-1, 116, -1, -1]", style=solid]; +"273 /stage3/stage3.7/branch2/branch2.5/Conv" -> "274 /stage3/stage3.7/branch2/branch2.7/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"274 /stage3/stage3.7/branch2/branch2.7/Relu" -> "275 /stage3/stage3.7/Concat" [label="[-1, 116, -1, -1]", style=solid]; +"275 /stage3/stage3.7/Concat" -> "277 /stage3/stage3.7/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "276 /stage3/stage3.7/Constant_6" -> "277 /stage3/stage3.7/Reshape" [label="[5]", style=dashed]; "277 /stage3/stage3.7/Reshape" -> "278 /stage3/stage3.7/Transpose" [label="[1, 2, 116, 14, 14]", style=solid]; "278 /stage3/stage3.7/Transpose" -> "280 /stage3/stage3.7/Reshape_1" [label="[1, 116, 2, 14, 14]", style=solid]; @@ -749,16 +749,16 @@ strict digraph { "303 /stage4/stage4.1/Constant_4" -> "304 /stage4/stage4.1/Mul" [label="[1]", style=dashed]; "304 /stage4/stage4.1/Mul" -> "305 /stage4/stage4.1/Slice" [label="[1]", style=dashed]; "304 /stage4/stage4.1/Mul" -> "308 /stage4/stage4.1/Slice_1" [label="[1]", style=dashed]; -"305 /stage4/stage4.1/Slice" -> "314 /stage4/stage4.1/Concat" [label="[]", style=solid]; +"305 /stage4/stage4.1/Slice" -> "314 /stage4/stage4.1/Concat" [label="[-1, -1, -1, -1]", style=solid]; "306 /stage4/stage4.1/Constant_5" -> "307 /stage4/stage4.1/Mul_1" [label="[1]", style=dashed]; "307 /stage4/stage4.1/Mul_1" -> "308 /stage4/stage4.1/Slice_1" [label="[1]", style=dashed]; -"308 /stage4/stage4.1/Slice_1" -> "309 /stage4/stage4.1/branch2/branch2.0/Conv" [label="[]", style=solid]; -"309 /stage4/stage4.1/branch2/branch2.0/Conv" -> "310 /stage4/stage4.1/branch2/branch2.2/Relu" [label="[]", style=solid]; -"310 /stage4/stage4.1/branch2/branch2.2/Relu" -> "311 /stage4/stage4.1/branch2/branch2.3/Conv" [label="[]", style=solid]; -"311 /stage4/stage4.1/branch2/branch2.3/Conv" -> "312 /stage4/stage4.1/branch2/branch2.5/Conv" [label="[]", style=solid]; -"312 /stage4/stage4.1/branch2/branch2.5/Conv" -> "313 /stage4/stage4.1/branch2/branch2.7/Relu" [label="[]", style=solid]; -"313 
/stage4/stage4.1/branch2/branch2.7/Relu" -> "314 /stage4/stage4.1/Concat" [label="[]", style=solid]; -"314 /stage4/stage4.1/Concat" -> "316 /stage4/stage4.1/Reshape" [label="[]", style=solid]; +"308 /stage4/stage4.1/Slice_1" -> "309 /stage4/stage4.1/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; +"309 /stage4/stage4.1/branch2/branch2.0/Conv" -> "310 /stage4/stage4.1/branch2/branch2.2/Relu" [label="[-1, 232, -1, -1]", style=solid]; +"310 /stage4/stage4.1/branch2/branch2.2/Relu" -> "311 /stage4/stage4.1/branch2/branch2.3/Conv" [label="[-1, 232, -1, -1]", style=solid]; +"311 /stage4/stage4.1/branch2/branch2.3/Conv" -> "312 /stage4/stage4.1/branch2/branch2.5/Conv" [label="[-1, 232, -1, -1]", style=solid]; +"312 /stage4/stage4.1/branch2/branch2.5/Conv" -> "313 /stage4/stage4.1/branch2/branch2.7/Relu" [label="[-1, 232, -1, -1]", style=solid]; +"313 /stage4/stage4.1/branch2/branch2.7/Relu" -> "314 /stage4/stage4.1/Concat" [label="[-1, 232, -1, -1]", style=solid]; +"314 /stage4/stage4.1/Concat" -> "316 /stage4/stage4.1/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "315 /stage4/stage4.1/Constant_6" -> "316 /stage4/stage4.1/Reshape" [label="[5]", style=dashed]; "316 /stage4/stage4.1/Reshape" -> "317 /stage4/stage4.1/Transpose" [label="[1, 2, 232, 7, 7]", style=solid]; "317 /stage4/stage4.1/Transpose" -> "319 /stage4/stage4.1/Reshape_1" [label="[1, 232, 2, 7, 7]", style=solid]; @@ -780,16 +780,16 @@ strict digraph { "328 /stage4/stage4.2/Constant_4" -> "329 /stage4/stage4.2/Mul" [label="[1]", style=dashed]; "329 /stage4/stage4.2/Mul" -> "330 /stage4/stage4.2/Slice" [label="[1]", style=dashed]; "329 /stage4/stage4.2/Mul" -> "333 /stage4/stage4.2/Slice_1" [label="[1]", style=dashed]; -"330 /stage4/stage4.2/Slice" -> "339 /stage4/stage4.2/Concat" [label="[]", style=solid]; +"330 /stage4/stage4.2/Slice" -> "339 /stage4/stage4.2/Concat" [label="[-1, -1, -1, -1]", style=solid]; "331 /stage4/stage4.2/Constant_5" -> "332 /stage4/stage4.2/Mul_1" [label="[1]", style=dashed]; "332 /stage4/stage4.2/Mul_1" -> "333 /stage4/stage4.2/Slice_1" [label="[1]", style=dashed]; -"333 /stage4/stage4.2/Slice_1" -> "334 /stage4/stage4.2/branch2/branch2.0/Conv" [label="[]", style=solid]; -"334 /stage4/stage4.2/branch2/branch2.0/Conv" -> "335 /stage4/stage4.2/branch2/branch2.2/Relu" [label="[]", style=solid]; -"335 /stage4/stage4.2/branch2/branch2.2/Relu" -> "336 /stage4/stage4.2/branch2/branch2.3/Conv" [label="[]", style=solid]; -"336 /stage4/stage4.2/branch2/branch2.3/Conv" -> "337 /stage4/stage4.2/branch2/branch2.5/Conv" [label="[]", style=solid]; -"337 /stage4/stage4.2/branch2/branch2.5/Conv" -> "338 /stage4/stage4.2/branch2/branch2.7/Relu" [label="[]", style=solid]; -"338 /stage4/stage4.2/branch2/branch2.7/Relu" -> "339 /stage4/stage4.2/Concat" [label="[]", style=solid]; -"339 /stage4/stage4.2/Concat" -> "341 /stage4/stage4.2/Reshape" [label="[]", style=solid]; +"333 /stage4/stage4.2/Slice_1" -> "334 /stage4/stage4.2/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; +"334 /stage4/stage4.2/branch2/branch2.0/Conv" -> "335 /stage4/stage4.2/branch2/branch2.2/Relu" [label="[-1, 232, -1, -1]", style=solid]; +"335 /stage4/stage4.2/branch2/branch2.2/Relu" -> "336 /stage4/stage4.2/branch2/branch2.3/Conv" [label="[-1, 232, -1, -1]", style=solid]; +"336 /stage4/stage4.2/branch2/branch2.3/Conv" -> "337 /stage4/stage4.2/branch2/branch2.5/Conv" [label="[-1, 232, -1, -1]", style=solid]; +"337 /stage4/stage4.2/branch2/branch2.5/Conv" -> "338 /stage4/stage4.2/branch2/branch2.7/Relu" [label="[-1, 232, 
-1, -1]", style=solid]; +"338 /stage4/stage4.2/branch2/branch2.7/Relu" -> "339 /stage4/stage4.2/Concat" [label="[-1, 232, -1, -1]", style=solid]; +"339 /stage4/stage4.2/Concat" -> "341 /stage4/stage4.2/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "340 /stage4/stage4.2/Constant_6" -> "341 /stage4/stage4.2/Reshape" [label="[5]", style=dashed]; "341 /stage4/stage4.2/Reshape" -> "342 /stage4/stage4.2/Transpose" [label="[1, 2, 232, 7, 7]", style=solid]; "342 /stage4/stage4.2/Transpose" -> "344 /stage4/stage4.2/Reshape_1" [label="[1, 232, 2, 7, 7]", style=solid]; @@ -811,16 +811,16 @@ strict digraph { "353 /stage4/stage4.3/Constant_4" -> "354 /stage4/stage4.3/Mul" [label="[1]", style=dashed]; "354 /stage4/stage4.3/Mul" -> "355 /stage4/stage4.3/Slice" [label="[1]", style=dashed]; "354 /stage4/stage4.3/Mul" -> "358 /stage4/stage4.3/Slice_1" [label="[1]", style=dashed]; -"355 /stage4/stage4.3/Slice" -> "364 /stage4/stage4.3/Concat" [label="[]", style=solid]; +"355 /stage4/stage4.3/Slice" -> "364 /stage4/stage4.3/Concat" [label="[-1, -1, -1, -1]", style=solid]; "356 /stage4/stage4.3/Constant_5" -> "357 /stage4/stage4.3/Mul_1" [label="[1]", style=dashed]; "357 /stage4/stage4.3/Mul_1" -> "358 /stage4/stage4.3/Slice_1" [label="[1]", style=dashed]; -"358 /stage4/stage4.3/Slice_1" -> "359 /stage4/stage4.3/branch2/branch2.0/Conv" [label="[]", style=solid]; -"359 /stage4/stage4.3/branch2/branch2.0/Conv" -> "360 /stage4/stage4.3/branch2/branch2.2/Relu" [label="[]", style=solid]; -"360 /stage4/stage4.3/branch2/branch2.2/Relu" -> "361 /stage4/stage4.3/branch2/branch2.3/Conv" [label="[]", style=solid]; -"361 /stage4/stage4.3/branch2/branch2.3/Conv" -> "362 /stage4/stage4.3/branch2/branch2.5/Conv" [label="[]", style=solid]; -"362 /stage4/stage4.3/branch2/branch2.5/Conv" -> "363 /stage4/stage4.3/branch2/branch2.7/Relu" [label="[]", style=solid]; -"363 /stage4/stage4.3/branch2/branch2.7/Relu" -> "364 /stage4/stage4.3/Concat" [label="[]", style=solid]; -"364 /stage4/stage4.3/Concat" -> "366 /stage4/stage4.3/Reshape" [label="[]", style=solid]; +"358 /stage4/stage4.3/Slice_1" -> "359 /stage4/stage4.3/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; +"359 /stage4/stage4.3/branch2/branch2.0/Conv" -> "360 /stage4/stage4.3/branch2/branch2.2/Relu" [label="[-1, 232, -1, -1]", style=solid]; +"360 /stage4/stage4.3/branch2/branch2.2/Relu" -> "361 /stage4/stage4.3/branch2/branch2.3/Conv" [label="[-1, 232, -1, -1]", style=solid]; +"361 /stage4/stage4.3/branch2/branch2.3/Conv" -> "362 /stage4/stage4.3/branch2/branch2.5/Conv" [label="[-1, 232, -1, -1]", style=solid]; +"362 /stage4/stage4.3/branch2/branch2.5/Conv" -> "363 /stage4/stage4.3/branch2/branch2.7/Relu" [label="[-1, 232, -1, -1]", style=solid]; +"363 /stage4/stage4.3/branch2/branch2.7/Relu" -> "364 /stage4/stage4.3/Concat" [label="[-1, 232, -1, -1]", style=solid]; +"364 /stage4/stage4.3/Concat" -> "366 /stage4/stage4.3/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "365 /stage4/stage4.3/Constant_6" -> "366 /stage4/stage4.3/Reshape" [label="[5]", style=dashed]; "366 /stage4/stage4.3/Reshape" -> "367 /stage4/stage4.3/Transpose" [label="[1, 2, 232, 7, 7]", style=solid]; "367 /stage4/stage4.3/Transpose" -> "369 /stage4/stage4.3/Reshape_1" [label="[1, 232, 2, 7, 7]", style=solid]; diff --git a/tests/onnx/data/reference_graphs/original_nncf_graph/ssd-12.dot b/tests/onnx/data/reference_graphs/original_nncf_graph/ssd-12.dot index 909f02ec524..3427c9e63ad 100644 --- a/tests/onnx/data/reference_graphs/original_nncf_graph/ssd-12.dot +++ 
b/tests/onnx/data/reference_graphs/original_nncf_graph/ssd-12.dot @@ -124,400 +124,457 @@ strict digraph { "122 Gather_341" [id=122, type=Gather]; "123 Constant_342" [id=123, type=Constant]; "124 Constant_343" [id=124, type=Constant]; -"125 Unsqueeze_344" [id=125, type=Unsqueeze]; -"126 Unsqueeze_345" [id=126, type=Unsqueeze]; -"127 Unsqueeze_346" [id=127, type=Unsqueeze]; -"128 Concat_347" [id=128, type=Concat]; -"129 Reshape_348" [id=129, type=Reshape]; -"130 Conv_349" [id=130, type=Conv]; -"131 Constant_350" [id=131, type=Constant]; -"132 Shape_351" [id=132, type=Shape]; -"133 Gather_352" [id=133, type=Gather]; -"134 Constant_353" [id=134, type=Constant]; -"135 Constant_354" [id=135, type=Constant]; -"136 Unsqueeze_355" [id=136, type=Unsqueeze]; -"137 Unsqueeze_356" [id=137, type=Unsqueeze]; -"138 Unsqueeze_357" [id=138, type=Unsqueeze]; -"139 Concat_358" [id=139, type=Concat]; -"140 Reshape_359" [id=140, type=Reshape]; -"141 Conv_360" [id=141, type=Conv]; -"142 Constant_361" [id=142, type=Constant]; -"143 Shape_362" [id=143, type=Shape]; -"144 Gather_363" [id=144, type=Gather]; -"145 Constant_364" [id=145, type=Constant]; -"146 Constant_365" [id=146, type=Constant]; -"147 Unsqueeze_366" [id=147, type=Unsqueeze]; -"148 Unsqueeze_367" [id=148, type=Unsqueeze]; -"149 Unsqueeze_368" [id=149, type=Unsqueeze]; -"150 Concat_369" [id=150, type=Concat]; -"151 Reshape_370" [id=151, type=Reshape]; -"152 Conv_371" [id=152, type=Conv]; -"153 Constant_372" [id=153, type=Constant]; -"154 Shape_373" [id=154, type=Shape]; -"155 Gather_374" [id=155, type=Gather]; -"156 Constant_375" [id=156, type=Constant]; -"157 Constant_376" [id=157, type=Constant]; -"158 Unsqueeze_377" [id=158, type=Unsqueeze]; -"159 Unsqueeze_378" [id=159, type=Unsqueeze]; -"160 Unsqueeze_379" [id=160, type=Unsqueeze]; -"161 Concat_380" [id=161, type=Concat]; -"162 Reshape_381" [id=162, type=Reshape]; -"163 Conv_382" [id=163, type=Conv]; -"164 Constant_383" [id=164, type=Constant]; -"165 Shape_384" [id=165, type=Shape]; -"166 Gather_385" [id=166, type=Gather]; -"167 Constant_386" [id=167, type=Constant]; -"168 Constant_387" [id=168, type=Constant]; -"169 Unsqueeze_388" [id=169, type=Unsqueeze]; -"170 Unsqueeze_389" [id=170, type=Unsqueeze]; -"171 Unsqueeze_390" [id=171, type=Unsqueeze]; -"172 Concat_391" [id=172, type=Concat]; -"173 Reshape_392" [id=173, type=Reshape]; -"174 Conv_393" [id=174, type=Conv]; -"175 Constant_394" [id=175, type=Constant]; -"176 Shape_395" [id=176, type=Shape]; -"177 Gather_396" [id=177, type=Gather]; -"178 Constant_397" [id=178, type=Constant]; -"179 Constant_398" [id=179, type=Constant]; -"180 Unsqueeze_399" [id=180, type=Unsqueeze]; -"181 Unsqueeze_400" [id=181, type=Unsqueeze]; -"182 Unsqueeze_401" [id=182, type=Unsqueeze]; -"183 Concat_402" [id=183, type=Concat]; -"184 Reshape_403" [id=184, type=Reshape]; -"185 Conv_404" [id=185, type=Conv]; -"186 Constant_405" [id=186, type=Constant]; -"187 Shape_406" [id=187, type=Shape]; -"188 Gather_407" [id=188, type=Gather]; -"189 Constant_408" [id=189, type=Constant]; -"190 Constant_409" [id=190, type=Constant]; -"191 Unsqueeze_410" [id=191, type=Unsqueeze]; -"192 Unsqueeze_411" [id=192, type=Unsqueeze]; -"193 Unsqueeze_412" [id=193, type=Unsqueeze]; -"194 Concat_413" [id=194, type=Concat]; -"195 Reshape_414" [id=195, type=Reshape]; -"196 Conv_415" [id=196, type=Conv]; -"197 Constant_416" [id=197, type=Constant]; -"198 Shape_417" [id=198, type=Shape]; -"199 Gather_418" [id=199, type=Gather]; -"200 Constant_419" [id=200, type=Constant]; -"201 Constant_420" 
[id=201, type=Constant]; -"202 Unsqueeze_421" [id=202, type=Unsqueeze]; -"203 Unsqueeze_422" [id=203, type=Unsqueeze]; -"204 Unsqueeze_423" [id=204, type=Unsqueeze]; -"205 Concat_424" [id=205, type=Concat]; -"206 Reshape_425" [id=206, type=Reshape]; -"207 Conv_426" [id=207, type=Conv]; -"208 Constant_427" [id=208, type=Constant]; -"209 Shape_428" [id=209, type=Shape]; -"210 Gather_429" [id=210, type=Gather]; -"211 Constant_430" [id=211, type=Constant]; -"212 Constant_431" [id=212, type=Constant]; -"213 Unsqueeze_432" [id=213, type=Unsqueeze]; -"214 Unsqueeze_433" [id=214, type=Unsqueeze]; -"215 Unsqueeze_434" [id=215, type=Unsqueeze]; -"216 Concat_435" [id=216, type=Concat]; -"217 Reshape_436" [id=217, type=Reshape]; -"218 Conv_437" [id=218, type=Conv]; -"219 Constant_438" [id=219, type=Constant]; -"220 Shape_439" [id=220, type=Shape]; -"221 Gather_440" [id=221, type=Gather]; -"222 Constant_441" [id=222, type=Constant]; -"223 Constant_442" [id=223, type=Constant]; -"224 Unsqueeze_443" [id=224, type=Unsqueeze]; -"225 Unsqueeze_444" [id=225, type=Unsqueeze]; -"226 Unsqueeze_445" [id=226, type=Unsqueeze]; -"227 Concat_446" [id=227, type=Concat]; -"228 Reshape_447" [id=228, type=Reshape]; -"229 Conv_448" [id=229, type=Conv]; -"230 Constant_449" [id=230, type=Constant]; -"231 Shape_450" [id=231, type=Shape]; -"232 Gather_451" [id=232, type=Gather]; -"233 Constant_452" [id=233, type=Constant]; -"234 Constant_453" [id=234, type=Constant]; -"235 Unsqueeze_454" [id=235, type=Unsqueeze]; -"236 Unsqueeze_455" [id=236, type=Unsqueeze]; -"237 Unsqueeze_456" [id=237, type=Unsqueeze]; -"238 Concat_457" [id=238, type=Concat]; -"239 Reshape_458" [id=239, type=Reshape]; -"240 Conv_459" [id=240, type=Conv]; -"241 Constant_460" [id=241, type=Constant]; -"242 Shape_461" [id=242, type=Shape]; -"243 Gather_462" [id=243, type=Gather]; -"244 Constant_463" [id=244, type=Constant]; -"245 Constant_464" [id=245, type=Constant]; -"246 Unsqueeze_465" [id=246, type=Unsqueeze]; -"247 Unsqueeze_466" [id=247, type=Unsqueeze]; -"248 Unsqueeze_467" [id=248, type=Unsqueeze]; -"249 Concat_468" [id=249, type=Concat]; -"250 Reshape_469" [id=250, type=Reshape]; -"251 Concat_470" [id=251, type=Concat]; -"252 Concat_471" [id=252, type=Concat]; -"253 Transpose_472" [id=253, type=Transpose]; -"254 Transpose_473" [id=254, type=Transpose]; -"255 Constant_474" [id=255, type=Constant]; -"256 Constant_475" [id=256, type=Constant]; -"257 Constant_476" [id=257, type=Constant]; -"258 Constant_477" [id=258, type=Constant]; -"259 Slice_478" [id=259, type=Slice]; -"260 Constant_479" [id=260, type=Constant]; -"261 Constant_480" [id=261, type=Constant]; -"262 Constant_481" [id=262, type=Constant]; -"263 Constant_482" [id=263, type=Constant]; -"264 Slice_483" [id=264, type=Slice]; -"265 Constant_484" [id=265, type=Constant]; -"266 Constant_485" [id=266, type=Constant]; -"267 Constant_486" [id=267, type=Constant]; -"268 Constant_487" [id=268, type=Constant]; -"269 Slice_488" [id=269, type=Slice]; -"270 Constant_489" [id=270, type=Constant]; -"271 Mul_490" [id=271, type=Mul]; -"272 Constant_491" [id=272, type=Constant]; -"273 Constant_492" [id=273, type=Constant]; -"274 Constant_493" [id=274, type=Constant]; -"275 Constant_494" [id=275, type=Constant]; -"276 Slice_495" [id=276, type=Slice]; -"277 Constant_496" [id=277, type=Constant]; -"278 Constant_497" [id=278, type=Constant]; -"279 Constant_498" [id=279, type=Constant]; -"280 Constant_499" [id=280, type=Constant]; -"281 Slice_500" [id=281, type=Slice]; -"282 Constant_501" [id=282, type=Constant]; 
-"283 Constant_502" [id=283, type=Constant]; -"284 Constant_503" [id=284, type=Constant]; -"285 Constant_504" [id=285, type=Constant]; -"286 Slice_505" [id=286, type=Slice]; -"287 Constant_506" [id=287, type=Constant]; -"288 Mul_507" [id=288, type=Mul]; -"289 Constant_508" [id=289, type=Constant]; -"290 Mul_509" [id=290, type=Mul]; -"291 Constant_510" [id=291, type=Constant]; -"292 Add_511" [id=292, type=Add]; -"293 Exp_512" [id=293, type=Exp]; -"294 Constant_513" [id=294, type=Constant]; -"295 Mul_514" [id=295, type=Mul]; -"296 Constant_515" [id=296, type=Constant]; -"297 Constant_516" [id=297, type=Constant]; -"298 Constant_517" [id=298, type=Constant]; -"299 Constant_518" [id=299, type=Constant]; -"300 Slice_519" [id=300, type=Slice]; -"301 Constant_520" [id=301, type=Constant]; -"302 Constant_521" [id=302, type=Constant]; -"303 Constant_522" [id=303, type=Constant]; -"304 Constant_523" [id=304, type=Constant]; -"305 Slice_524" [id=305, type=Slice]; -"306 Constant_525" [id=306, type=Constant]; -"307 Constant_526" [id=307, type=Constant]; -"308 Constant_527" [id=308, type=Constant]; -"309 Constant_528" [id=309, type=Constant]; -"310 Slice_529" [id=310, type=Slice]; -"311 Squeeze_530" [id=311, type=Squeeze]; -"312 Constant_531" [id=312, type=Constant]; -"313 Constant_532" [id=313, type=Constant]; -"314 Constant_533" [id=314, type=Constant]; -"315 Constant_534" [id=315, type=Constant]; -"316 Slice_535" [id=316, type=Slice]; -"317 Constant_536" [id=317, type=Constant]; -"318 Constant_537" [id=318, type=Constant]; -"319 Constant_538" [id=319, type=Constant]; -"320 Constant_539" [id=320, type=Constant]; -"321 Slice_540" [id=321, type=Slice]; -"322 Constant_541" [id=322, type=Constant]; -"323 Constant_542" [id=323, type=Constant]; -"324 Constant_543" [id=324, type=Constant]; -"325 Constant_544" [id=325, type=Constant]; -"326 Slice_545" [id=326, type=Slice]; -"327 Squeeze_546" [id=327, type=Squeeze]; -"328 Constant_547" [id=328, type=Constant]; -"329 Mul_548" [id=329, type=Mul]; -"330 Sub_549" [id=330, type=Sub]; -"331 Constant_550" [id=331, type=Constant]; -"332 Constant_551" [id=332, type=Constant]; -"333 Constant_552" [id=333, type=Constant]; -"334 Constant_553" [id=334, type=Constant]; -"335 Slice_554" [id=335, type=Slice]; -"336 Constant_555" [id=336, type=Constant]; -"337 Constant_556" [id=337, type=Constant]; -"338 Constant_557" [id=338, type=Constant]; -"339 Constant_558" [id=339, type=Constant]; -"340 Slice_559" [id=340, type=Slice]; -"341 Constant_560" [id=341, type=Constant]; -"342 Constant_561" [id=342, type=Constant]; -"343 Constant_562" [id=343, type=Constant]; -"344 Constant_563" [id=344, type=Constant]; -"345 Slice_564" [id=345, type=Slice]; -"346 Squeeze_565" [id=346, type=Squeeze]; -"347 Constant_566" [id=347, type=Constant]; -"348 Constant_567" [id=348, type=Constant]; -"349 Constant_568" [id=349, type=Constant]; -"350 Constant_569" [id=350, type=Constant]; -"351 Slice_570" [id=351, type=Slice]; -"352 Constant_571" [id=352, type=Constant]; -"353 Constant_572" [id=353, type=Constant]; -"354 Constant_573" [id=354, type=Constant]; -"355 Constant_574" [id=355, type=Constant]; -"356 Slice_575" [id=356, type=Slice]; -"357 Constant_576" [id=357, type=Constant]; -"358 Constant_577" [id=358, type=Constant]; -"359 Constant_578" [id=359, type=Constant]; -"360 Constant_579" [id=360, type=Constant]; -"361 Slice_580" [id=361, type=Slice]; -"362 Squeeze_581" [id=362, type=Squeeze]; -"363 Constant_582" [id=363, type=Constant]; -"364 Mul_583" [id=364, type=Mul]; -"365 Sub_584" [id=365, 
type=Sub]; -"366 Constant_585" [id=366, type=Constant]; -"367 Constant_586" [id=367, type=Constant]; -"368 Constant_587" [id=368, type=Constant]; -"369 Constant_588" [id=369, type=Constant]; -"370 Slice_589" [id=370, type=Slice]; -"371 Constant_590" [id=371, type=Constant]; -"372 Constant_591" [id=372, type=Constant]; -"373 Constant_592" [id=373, type=Constant]; -"374 Constant_593" [id=374, type=Constant]; -"375 Slice_594" [id=375, type=Slice]; -"376 Constant_595" [id=376, type=Constant]; -"377 Constant_596" [id=377, type=Constant]; -"378 Constant_597" [id=378, type=Constant]; -"379 Constant_598" [id=379, type=Constant]; -"380 Slice_599" [id=380, type=Slice]; -"381 Squeeze_600" [id=381, type=Squeeze]; -"382 Constant_601" [id=382, type=Constant]; -"383 Constant_602" [id=383, type=Constant]; -"384 Constant_603" [id=384, type=Constant]; -"385 Constant_604" [id=385, type=Constant]; -"386 Slice_605" [id=386, type=Slice]; -"387 Constant_606" [id=387, type=Constant]; -"388 Constant_607" [id=388, type=Constant]; -"389 Constant_608" [id=389, type=Constant]; -"390 Constant_609" [id=390, type=Constant]; -"391 Slice_610" [id=391, type=Slice]; -"392 Constant_611" [id=392, type=Constant]; -"393 Constant_612" [id=393, type=Constant]; -"394 Constant_613" [id=394, type=Constant]; -"395 Constant_614" [id=395, type=Constant]; -"396 Slice_615" [id=396, type=Slice]; -"397 Squeeze_616" [id=397, type=Squeeze]; -"398 Constant_617" [id=398, type=Constant]; -"399 Mul_618" [id=399, type=Mul]; -"400 Add_619" [id=400, type=Add]; -"401 Constant_620" [id=401, type=Constant]; -"402 Constant_621" [id=402, type=Constant]; -"403 Constant_622" [id=403, type=Constant]; -"404 Constant_623" [id=404, type=Constant]; -"405 Slice_624" [id=405, type=Slice]; -"406 Constant_625" [id=406, type=Constant]; -"407 Constant_626" [id=407, type=Constant]; -"408 Constant_627" [id=408, type=Constant]; -"409 Constant_628" [id=409, type=Constant]; -"410 Slice_629" [id=410, type=Slice]; -"411 Constant_630" [id=411, type=Constant]; -"412 Constant_631" [id=412, type=Constant]; -"413 Constant_632" [id=413, type=Constant]; -"414 Constant_633" [id=414, type=Constant]; -"415 Slice_634" [id=415, type=Slice]; -"416 Squeeze_635" [id=416, type=Squeeze]; -"417 Constant_636" [id=417, type=Constant]; -"418 Constant_637" [id=418, type=Constant]; -"419 Constant_638" [id=419, type=Constant]; -"420 Constant_639" [id=420, type=Constant]; -"421 Slice_640" [id=421, type=Slice]; -"422 Constant_641" [id=422, type=Constant]; -"423 Constant_642" [id=423, type=Constant]; -"424 Constant_643" [id=424, type=Constant]; -"425 Constant_644" [id=425, type=Constant]; -"426 Slice_645" [id=426, type=Slice]; -"427 Constant_646" [id=427, type=Constant]; -"428 Constant_647" [id=428, type=Constant]; -"429 Constant_648" [id=429, type=Constant]; -"430 Constant_649" [id=430, type=Constant]; -"431 Slice_650" [id=431, type=Slice]; -"432 Squeeze_651" [id=432, type=Squeeze]; -"433 Constant_652" [id=433, type=Constant]; -"434 Mul_653" [id=434, type=Mul]; -"435 Add_654" [id=435, type=Add]; -"436 Unsqueeze_655" [id=436, type=Unsqueeze]; -"437 Unsqueeze_656" [id=437, type=Unsqueeze]; -"438 Unsqueeze_657" [id=438, type=Unsqueeze]; -"439 Unsqueeze_658" [id=439, type=Unsqueeze]; -"440 Concat_659" [id=440, type=Concat]; -"441 Softmax_660" [id=441, type=Softmax]; -"442 Transpose_661" [id=442, type=Transpose]; -"443 Constant_662" [id=443, type=Constant]; -"444 Constant_663" [id=444, type=Constant]; -"445 Constant_664" [id=445, type=Constant]; -"446 Constant_665" [id=446, type=Constant]; -"447 
Slice_666" [id=447, type=Slice]; -"448 Constant_667" [id=448, type=Constant]; -"449 Constant_668" [id=449, type=Constant]; -"450 Constant_669" [id=450, type=Constant]; -"451 Constant_670" [id=451, type=Constant]; -"452 Slice_671" [id=452, type=Slice]; -"453 Constant_672" [id=453, type=Constant]; -"454 Constant_673" [id=454, type=Constant]; -"455 Constant_674" [id=455, type=Constant]; -"456 Constant_675" [id=456, type=Constant]; -"457 Slice_676" [id=457, type=Slice]; -"458 Constant_677" [id=458, type=Constant]; -"459 ConstantOfShape_678" [id=459, type=ConstantOfShape]; -"460 Constant_679" [id=460, type=Constant]; -"461 ConstantOfShape_680" [id=461, type=ConstantOfShape]; -"462 Constant_681" [id=462, type=Constant]; -"463 ConstantOfShape_682" [id=463, type=ConstantOfShape]; -"464 NonMaxSuppression_683" [id=464, type=NonMaxSuppression]; -"465 Constant_684" [id=465, type=Constant]; -"466 Constant_685" [id=466, type=Constant]; -"467 Constant_686" [id=467, type=Constant]; -"468 Constant_687" [id=468, type=Constant]; -"469 Slice_688" [id=469, type=Slice]; -"470 Constant_689" [id=470, type=Constant]; -"471 Gather_690" [id=471, type=Gather]; -"472 Constant_691" [id=472, type=Constant]; -"473 Constant_692" [id=473, type=Constant]; -"474 Constant_693" [id=474, type=Constant]; -"475 Constant_694" [id=475, type=Constant]; -"476 Slice_695" [id=476, type=Slice]; -"477 Constant_696" [id=477, type=Constant]; -"478 Gather_697" [id=478, type=Gather]; -"479 Constant_698" [id=479, type=Constant]; -"480 Reshape_699" [id=480, type=Reshape]; -"481 Shape_700" [id=481, type=Shape]; -"482 Constant_701" [id=482, type=Constant]; -"483 Gather_702" [id=483, type=Gather]; -"484 Mul_703" [id=484, type=Mul]; -"485 Add_704" [id=485, type=Add]; -"486 Cast_705" [id=486, type=Cast]; -"487 Gather_706" [id=487, type=Gather]; -"488 Shape_707" [id=488, type=Shape]; -"489 Constant_708" [id=489, type=Constant]; -"490 Gather_709" [id=490, type=Gather]; -"491 Unsqueeze_710" [id=491, type=Unsqueeze]; -"492 Constant_711" [id=492, type=Constant]; -"493 Concat_712" [id=493, type=Concat]; -"494 Cast_713" [id=494, type=Cast]; -"495 ReduceMin_714" [id=495, type=ReduceMin]; -"496 Cast_715" [id=496, type=Cast]; -"497 Unsqueeze_716" [id=497, type=Unsqueeze]; -"498 TopK_717" [id=498, type=TopK]; -"499 Squeeze_719" [id=499, type=Squeeze]; -"500 Gather_720" [id=500, type=Gather]; -"501 Constant_721" [id=501, type=Constant]; -"502 Constant_722" [id=502, type=Constant]; -"503 Constant_723" [id=503, type=Constant]; -"504 Constant_724" [id=504, type=Constant]; -"505 Slice_725" [id=505, type=Slice]; -"506 Cast_726" [id=506, type=Cast]; -"507 Gather_727" [id=507, type=Gather]; -"508 Unsqueeze_bboxes" [id=508, type=Unsqueeze]; -"509 Gather_729" [id=509, type=Gather]; -"510 Unsqueeze_730" [id=510, type=Unsqueeze]; -"511 Constant_731" [id=511, type=Constant]; -"512 Add_labels" [id=512, type=Add]; -"513 Gather_733" [id=513, type=Gather]; -"514 Unsqueeze_scores" [id=514, type=Unsqueeze]; -"515 nncf_model_input_0" [id=515, type=nncf_model_input]; -"516 nncf_model_output_0" [id=516, type=nncf_model_output]; -"517 nncf_model_output_1" [id=517, type=nncf_model_output]; -"518 nncf_model_output_2" [id=518, type=nncf_model_output]; +"125 Constant_nncf_125" [id=125, type=Constant]; +"126 Unsqueeze_344" [id=126, type=Unsqueeze]; +"127 Constant_nncf_127" [id=127, type=Constant]; +"128 Unsqueeze_345" [id=128, type=Unsqueeze]; +"129 Constant_nncf_129" [id=129, type=Constant]; +"130 Unsqueeze_346" [id=130, type=Unsqueeze]; +"131 Concat_347" [id=131, type=Concat]; +"132 
Reshape_348" [id=132, type=Reshape]; +"133 Conv_349" [id=133, type=Conv]; +"134 Constant_350" [id=134, type=Constant]; +"135 Shape_351" [id=135, type=Shape]; +"136 Gather_352" [id=136, type=Gather]; +"137 Constant_353" [id=137, type=Constant]; +"138 Constant_354" [id=138, type=Constant]; +"139 Constant_nncf_139" [id=139, type=Constant]; +"140 Unsqueeze_355" [id=140, type=Unsqueeze]; +"141 Constant_nncf_141" [id=141, type=Constant]; +"142 Unsqueeze_356" [id=142, type=Unsqueeze]; +"143 Constant_nncf_143" [id=143, type=Constant]; +"144 Unsqueeze_357" [id=144, type=Unsqueeze]; +"145 Concat_358" [id=145, type=Concat]; +"146 Reshape_359" [id=146, type=Reshape]; +"147 Conv_360" [id=147, type=Conv]; +"148 Constant_361" [id=148, type=Constant]; +"149 Shape_362" [id=149, type=Shape]; +"150 Gather_363" [id=150, type=Gather]; +"151 Constant_364" [id=151, type=Constant]; +"152 Constant_365" [id=152, type=Constant]; +"153 Constant_nncf_153" [id=153, type=Constant]; +"154 Unsqueeze_366" [id=154, type=Unsqueeze]; +"155 Constant_nncf_155" [id=155, type=Constant]; +"156 Unsqueeze_367" [id=156, type=Unsqueeze]; +"157 Constant_nncf_157" [id=157, type=Constant]; +"158 Unsqueeze_368" [id=158, type=Unsqueeze]; +"159 Concat_369" [id=159, type=Concat]; +"160 Reshape_370" [id=160, type=Reshape]; +"161 Conv_371" [id=161, type=Conv]; +"162 Constant_372" [id=162, type=Constant]; +"163 Shape_373" [id=163, type=Shape]; +"164 Gather_374" [id=164, type=Gather]; +"165 Constant_375" [id=165, type=Constant]; +"166 Constant_376" [id=166, type=Constant]; +"167 Constant_nncf_167" [id=167, type=Constant]; +"168 Unsqueeze_377" [id=168, type=Unsqueeze]; +"169 Constant_nncf_169" [id=169, type=Constant]; +"170 Unsqueeze_378" [id=170, type=Unsqueeze]; +"171 Constant_nncf_171" [id=171, type=Constant]; +"172 Unsqueeze_379" [id=172, type=Unsqueeze]; +"173 Concat_380" [id=173, type=Concat]; +"174 Reshape_381" [id=174, type=Reshape]; +"175 Conv_382" [id=175, type=Conv]; +"176 Constant_383" [id=176, type=Constant]; +"177 Shape_384" [id=177, type=Shape]; +"178 Gather_385" [id=178, type=Gather]; +"179 Constant_386" [id=179, type=Constant]; +"180 Constant_387" [id=180, type=Constant]; +"181 Constant_nncf_181" [id=181, type=Constant]; +"182 Unsqueeze_388" [id=182, type=Unsqueeze]; +"183 Constant_nncf_183" [id=183, type=Constant]; +"184 Unsqueeze_389" [id=184, type=Unsqueeze]; +"185 Constant_nncf_185" [id=185, type=Constant]; +"186 Unsqueeze_390" [id=186, type=Unsqueeze]; +"187 Concat_391" [id=187, type=Concat]; +"188 Reshape_392" [id=188, type=Reshape]; +"189 Conv_393" [id=189, type=Conv]; +"190 Constant_394" [id=190, type=Constant]; +"191 Shape_395" [id=191, type=Shape]; +"192 Gather_396" [id=192, type=Gather]; +"193 Constant_397" [id=193, type=Constant]; +"194 Constant_398" [id=194, type=Constant]; +"195 Constant_nncf_195" [id=195, type=Constant]; +"196 Unsqueeze_399" [id=196, type=Unsqueeze]; +"197 Constant_nncf_197" [id=197, type=Constant]; +"198 Unsqueeze_400" [id=198, type=Unsqueeze]; +"199 Constant_nncf_199" [id=199, type=Constant]; +"200 Unsqueeze_401" [id=200, type=Unsqueeze]; +"201 Concat_402" [id=201, type=Concat]; +"202 Reshape_403" [id=202, type=Reshape]; +"203 Conv_404" [id=203, type=Conv]; +"204 Constant_405" [id=204, type=Constant]; +"205 Shape_406" [id=205, type=Shape]; +"206 Gather_407" [id=206, type=Gather]; +"207 Constant_408" [id=207, type=Constant]; +"208 Constant_409" [id=208, type=Constant]; +"209 Constant_nncf_209" [id=209, type=Constant]; +"210 Unsqueeze_410" [id=210, type=Unsqueeze]; +"211 Constant_nncf_211" [id=211, 
type=Constant]; +"212 Unsqueeze_411" [id=212, type=Unsqueeze]; +"213 Constant_nncf_213" [id=213, type=Constant]; +"214 Unsqueeze_412" [id=214, type=Unsqueeze]; +"215 Concat_413" [id=215, type=Concat]; +"216 Reshape_414" [id=216, type=Reshape]; +"217 Conv_415" [id=217, type=Conv]; +"218 Constant_416" [id=218, type=Constant]; +"219 Shape_417" [id=219, type=Shape]; +"220 Gather_418" [id=220, type=Gather]; +"221 Constant_419" [id=221, type=Constant]; +"222 Constant_420" [id=222, type=Constant]; +"223 Constant_nncf_223" [id=223, type=Constant]; +"224 Unsqueeze_421" [id=224, type=Unsqueeze]; +"225 Constant_nncf_225" [id=225, type=Constant]; +"226 Unsqueeze_422" [id=226, type=Unsqueeze]; +"227 Constant_nncf_227" [id=227, type=Constant]; +"228 Unsqueeze_423" [id=228, type=Unsqueeze]; +"229 Concat_424" [id=229, type=Concat]; +"230 Reshape_425" [id=230, type=Reshape]; +"231 Conv_426" [id=231, type=Conv]; +"232 Constant_427" [id=232, type=Constant]; +"233 Shape_428" [id=233, type=Shape]; +"234 Gather_429" [id=234, type=Gather]; +"235 Constant_430" [id=235, type=Constant]; +"236 Constant_431" [id=236, type=Constant]; +"237 Constant_nncf_237" [id=237, type=Constant]; +"238 Unsqueeze_432" [id=238, type=Unsqueeze]; +"239 Constant_nncf_239" [id=239, type=Constant]; +"240 Unsqueeze_433" [id=240, type=Unsqueeze]; +"241 Constant_nncf_241" [id=241, type=Constant]; +"242 Unsqueeze_434" [id=242, type=Unsqueeze]; +"243 Concat_435" [id=243, type=Concat]; +"244 Reshape_436" [id=244, type=Reshape]; +"245 Conv_437" [id=245, type=Conv]; +"246 Constant_438" [id=246, type=Constant]; +"247 Shape_439" [id=247, type=Shape]; +"248 Gather_440" [id=248, type=Gather]; +"249 Constant_441" [id=249, type=Constant]; +"250 Constant_442" [id=250, type=Constant]; +"251 Constant_nncf_251" [id=251, type=Constant]; +"252 Unsqueeze_443" [id=252, type=Unsqueeze]; +"253 Constant_nncf_253" [id=253, type=Constant]; +"254 Unsqueeze_444" [id=254, type=Unsqueeze]; +"255 Constant_nncf_255" [id=255, type=Constant]; +"256 Unsqueeze_445" [id=256, type=Unsqueeze]; +"257 Concat_446" [id=257, type=Concat]; +"258 Reshape_447" [id=258, type=Reshape]; +"259 Conv_448" [id=259, type=Conv]; +"260 Constant_449" [id=260, type=Constant]; +"261 Shape_450" [id=261, type=Shape]; +"262 Gather_451" [id=262, type=Gather]; +"263 Constant_452" [id=263, type=Constant]; +"264 Constant_453" [id=264, type=Constant]; +"265 Constant_nncf_265" [id=265, type=Constant]; +"266 Unsqueeze_454" [id=266, type=Unsqueeze]; +"267 Constant_nncf_267" [id=267, type=Constant]; +"268 Unsqueeze_455" [id=268, type=Unsqueeze]; +"269 Constant_nncf_269" [id=269, type=Constant]; +"270 Unsqueeze_456" [id=270, type=Unsqueeze]; +"271 Concat_457" [id=271, type=Concat]; +"272 Reshape_458" [id=272, type=Reshape]; +"273 Conv_459" [id=273, type=Conv]; +"274 Constant_460" [id=274, type=Constant]; +"275 Shape_461" [id=275, type=Shape]; +"276 Gather_462" [id=276, type=Gather]; +"277 Constant_463" [id=277, type=Constant]; +"278 Constant_464" [id=278, type=Constant]; +"279 Constant_nncf_279" [id=279, type=Constant]; +"280 Unsqueeze_465" [id=280, type=Unsqueeze]; +"281 Constant_nncf_281" [id=281, type=Constant]; +"282 Unsqueeze_466" [id=282, type=Unsqueeze]; +"283 Constant_nncf_283" [id=283, type=Constant]; +"284 Unsqueeze_467" [id=284, type=Unsqueeze]; +"285 Concat_468" [id=285, type=Concat]; +"286 Reshape_469" [id=286, type=Reshape]; +"287 Concat_470" [id=287, type=Concat]; +"288 Concat_471" [id=288, type=Concat]; +"289 Transpose_472" [id=289, type=Transpose]; +"290 Transpose_473" [id=290, type=Transpose]; 
+"291 Constant_474" [id=291, type=Constant]; +"292 Constant_475" [id=292, type=Constant]; +"293 Constant_476" [id=293, type=Constant]; +"294 Constant_477" [id=294, type=Constant]; +"295 Slice_478" [id=295, type=Slice]; +"296 Constant_479" [id=296, type=Constant]; +"297 Constant_480" [id=297, type=Constant]; +"298 Constant_481" [id=298, type=Constant]; +"299 Constant_482" [id=299, type=Constant]; +"300 Slice_483" [id=300, type=Slice]; +"301 Constant_484" [id=301, type=Constant]; +"302 Constant_485" [id=302, type=Constant]; +"303 Constant_486" [id=303, type=Constant]; +"304 Constant_487" [id=304, type=Constant]; +"305 Slice_488" [id=305, type=Slice]; +"306 Constant_489" [id=306, type=Constant]; +"307 Mul_490" [id=307, type=Mul]; +"308 Constant_491" [id=308, type=Constant]; +"309 Constant_492" [id=309, type=Constant]; +"310 Constant_493" [id=310, type=Constant]; +"311 Constant_494" [id=311, type=Constant]; +"312 Slice_495" [id=312, type=Slice]; +"313 Constant_496" [id=313, type=Constant]; +"314 Constant_497" [id=314, type=Constant]; +"315 Constant_498" [id=315, type=Constant]; +"316 Constant_499" [id=316, type=Constant]; +"317 Slice_500" [id=317, type=Slice]; +"318 Constant_501" [id=318, type=Constant]; +"319 Constant_502" [id=319, type=Constant]; +"320 Constant_503" [id=320, type=Constant]; +"321 Constant_504" [id=321, type=Constant]; +"322 Slice_505" [id=322, type=Slice]; +"323 Constant_506" [id=323, type=Constant]; +"324 Mul_507" [id=324, type=Mul]; +"325 Constant_508" [id=325, type=Constant]; +"326 Mul_509" [id=326, type=Mul]; +"327 Constant_510" [id=327, type=Constant]; +"328 Add_511" [id=328, type=Add]; +"329 Exp_512" [id=329, type=Exp]; +"330 Constant_513" [id=330, type=Constant]; +"331 Mul_514" [id=331, type=Mul]; +"332 Constant_515" [id=332, type=Constant]; +"333 Constant_516" [id=333, type=Constant]; +"334 Constant_517" [id=334, type=Constant]; +"335 Constant_518" [id=335, type=Constant]; +"336 Slice_519" [id=336, type=Slice]; +"337 Constant_520" [id=337, type=Constant]; +"338 Constant_521" [id=338, type=Constant]; +"339 Constant_522" [id=339, type=Constant]; +"340 Constant_523" [id=340, type=Constant]; +"341 Slice_524" [id=341, type=Slice]; +"342 Constant_525" [id=342, type=Constant]; +"343 Constant_526" [id=343, type=Constant]; +"344 Constant_527" [id=344, type=Constant]; +"345 Constant_528" [id=345, type=Constant]; +"346 Slice_529" [id=346, type=Slice]; +"347 Constant_nncf_347" [id=347, type=Constant]; +"348 Squeeze_530" [id=348, type=Squeeze]; +"349 Constant_531" [id=349, type=Constant]; +"350 Constant_532" [id=350, type=Constant]; +"351 Constant_533" [id=351, type=Constant]; +"352 Constant_534" [id=352, type=Constant]; +"353 Slice_535" [id=353, type=Slice]; +"354 Constant_536" [id=354, type=Constant]; +"355 Constant_537" [id=355, type=Constant]; +"356 Constant_538" [id=356, type=Constant]; +"357 Constant_539" [id=357, type=Constant]; +"358 Slice_540" [id=358, type=Slice]; +"359 Constant_541" [id=359, type=Constant]; +"360 Constant_542" [id=360, type=Constant]; +"361 Constant_543" [id=361, type=Constant]; +"362 Constant_544" [id=362, type=Constant]; +"363 Slice_545" [id=363, type=Slice]; +"364 Constant_nncf_364" [id=364, type=Constant]; +"365 Squeeze_546" [id=365, type=Squeeze]; +"366 Constant_547" [id=366, type=Constant]; +"367 Mul_548" [id=367, type=Mul]; +"368 Sub_549" [id=368, type=Sub]; +"369 Constant_550" [id=369, type=Constant]; +"370 Constant_551" [id=370, type=Constant]; +"371 Constant_552" [id=371, type=Constant]; +"372 Constant_553" [id=372, type=Constant]; +"373 
Slice_554" [id=373, type=Slice]; +"374 Constant_555" [id=374, type=Constant]; +"375 Constant_556" [id=375, type=Constant]; +"376 Constant_557" [id=376, type=Constant]; +"377 Constant_558" [id=377, type=Constant]; +"378 Slice_559" [id=378, type=Slice]; +"379 Constant_560" [id=379, type=Constant]; +"380 Constant_561" [id=380, type=Constant]; +"381 Constant_562" [id=381, type=Constant]; +"382 Constant_563" [id=382, type=Constant]; +"383 Slice_564" [id=383, type=Slice]; +"384 Constant_nncf_384" [id=384, type=Constant]; +"385 Squeeze_565" [id=385, type=Squeeze]; +"386 Constant_566" [id=386, type=Constant]; +"387 Constant_567" [id=387, type=Constant]; +"388 Constant_568" [id=388, type=Constant]; +"389 Constant_569" [id=389, type=Constant]; +"390 Slice_570" [id=390, type=Slice]; +"391 Constant_571" [id=391, type=Constant]; +"392 Constant_572" [id=392, type=Constant]; +"393 Constant_573" [id=393, type=Constant]; +"394 Constant_574" [id=394, type=Constant]; +"395 Slice_575" [id=395, type=Slice]; +"396 Constant_576" [id=396, type=Constant]; +"397 Constant_577" [id=397, type=Constant]; +"398 Constant_578" [id=398, type=Constant]; +"399 Constant_579" [id=399, type=Constant]; +"400 Slice_580" [id=400, type=Slice]; +"401 Constant_nncf_401" [id=401, type=Constant]; +"402 Squeeze_581" [id=402, type=Squeeze]; +"403 Constant_582" [id=403, type=Constant]; +"404 Mul_583" [id=404, type=Mul]; +"405 Sub_584" [id=405, type=Sub]; +"406 Constant_585" [id=406, type=Constant]; +"407 Constant_586" [id=407, type=Constant]; +"408 Constant_587" [id=408, type=Constant]; +"409 Constant_588" [id=409, type=Constant]; +"410 Slice_589" [id=410, type=Slice]; +"411 Constant_590" [id=411, type=Constant]; +"412 Constant_591" [id=412, type=Constant]; +"413 Constant_592" [id=413, type=Constant]; +"414 Constant_593" [id=414, type=Constant]; +"415 Slice_594" [id=415, type=Slice]; +"416 Constant_595" [id=416, type=Constant]; +"417 Constant_596" [id=417, type=Constant]; +"418 Constant_597" [id=418, type=Constant]; +"419 Constant_598" [id=419, type=Constant]; +"420 Slice_599" [id=420, type=Slice]; +"421 Constant_nncf_421" [id=421, type=Constant]; +"422 Squeeze_600" [id=422, type=Squeeze]; +"423 Constant_601" [id=423, type=Constant]; +"424 Constant_602" [id=424, type=Constant]; +"425 Constant_603" [id=425, type=Constant]; +"426 Constant_604" [id=426, type=Constant]; +"427 Slice_605" [id=427, type=Slice]; +"428 Constant_606" [id=428, type=Constant]; +"429 Constant_607" [id=429, type=Constant]; +"430 Constant_608" [id=430, type=Constant]; +"431 Constant_609" [id=431, type=Constant]; +"432 Slice_610" [id=432, type=Slice]; +"433 Constant_611" [id=433, type=Constant]; +"434 Constant_612" [id=434, type=Constant]; +"435 Constant_613" [id=435, type=Constant]; +"436 Constant_614" [id=436, type=Constant]; +"437 Slice_615" [id=437, type=Slice]; +"438 Constant_nncf_438" [id=438, type=Constant]; +"439 Squeeze_616" [id=439, type=Squeeze]; +"440 Constant_617" [id=440, type=Constant]; +"441 Mul_618" [id=441, type=Mul]; +"442 Add_619" [id=442, type=Add]; +"443 Constant_620" [id=443, type=Constant]; +"444 Constant_621" [id=444, type=Constant]; +"445 Constant_622" [id=445, type=Constant]; +"446 Constant_623" [id=446, type=Constant]; +"447 Slice_624" [id=447, type=Slice]; +"448 Constant_625" [id=448, type=Constant]; +"449 Constant_626" [id=449, type=Constant]; +"450 Constant_627" [id=450, type=Constant]; +"451 Constant_628" [id=451, type=Constant]; +"452 Slice_629" [id=452, type=Slice]; +"453 Constant_630" [id=453, type=Constant]; +"454 Constant_631" 
[id=454, type=Constant]; +"455 Constant_632" [id=455, type=Constant]; +"456 Constant_633" [id=456, type=Constant]; +"457 Slice_634" [id=457, type=Slice]; +"458 Constant_nncf_458" [id=458, type=Constant]; +"459 Squeeze_635" [id=459, type=Squeeze]; +"460 Constant_636" [id=460, type=Constant]; +"461 Constant_637" [id=461, type=Constant]; +"462 Constant_638" [id=462, type=Constant]; +"463 Constant_639" [id=463, type=Constant]; +"464 Slice_640" [id=464, type=Slice]; +"465 Constant_641" [id=465, type=Constant]; +"466 Constant_642" [id=466, type=Constant]; +"467 Constant_643" [id=467, type=Constant]; +"468 Constant_644" [id=468, type=Constant]; +"469 Slice_645" [id=469, type=Slice]; +"470 Constant_646" [id=470, type=Constant]; +"471 Constant_647" [id=471, type=Constant]; +"472 Constant_648" [id=472, type=Constant]; +"473 Constant_649" [id=473, type=Constant]; +"474 Slice_650" [id=474, type=Slice]; +"475 Constant_nncf_475" [id=475, type=Constant]; +"476 Squeeze_651" [id=476, type=Squeeze]; +"477 Constant_652" [id=477, type=Constant]; +"478 Mul_653" [id=478, type=Mul]; +"479 Add_654" [id=479, type=Add]; +"480 Constant_nncf_480" [id=480, type=Constant]; +"481 Unsqueeze_655" [id=481, type=Unsqueeze]; +"482 Constant_nncf_482" [id=482, type=Constant]; +"483 Unsqueeze_656" [id=483, type=Unsqueeze]; +"484 Constant_nncf_484" [id=484, type=Constant]; +"485 Unsqueeze_657" [id=485, type=Unsqueeze]; +"486 Constant_nncf_486" [id=486, type=Constant]; +"487 Unsqueeze_658" [id=487, type=Unsqueeze]; +"488 Concat_659" [id=488, type=Concat]; +"489 Shape_nncf_489" [id=489, type=Shape]; +"490 Flatten_nncf_490" [id=490, type=Flatten]; +"491 Softmax_660" [id=491, type=Softmax]; +"492 Reshape_nncf_492" [id=492, type=Reshape]; +"493 Transpose_661" [id=493, type=Transpose]; +"494 Constant_662" [id=494, type=Constant]; +"495 Constant_663" [id=495, type=Constant]; +"496 Constant_664" [id=496, type=Constant]; +"497 Constant_665" [id=497, type=Constant]; +"498 Slice_666" [id=498, type=Slice]; +"499 Constant_667" [id=499, type=Constant]; +"500 Constant_668" [id=500, type=Constant]; +"501 Constant_669" [id=501, type=Constant]; +"502 Constant_670" [id=502, type=Constant]; +"503 Slice_671" [id=503, type=Slice]; +"504 Constant_672" [id=504, type=Constant]; +"505 Constant_673" [id=505, type=Constant]; +"506 Constant_674" [id=506, type=Constant]; +"507 Constant_675" [id=507, type=Constant]; +"508 Slice_676" [id=508, type=Slice]; +"509 Constant_677" [id=509, type=Constant]; +"510 ConstantOfShape_678" [id=510, type=ConstantOfShape]; +"511 Constant_679" [id=511, type=Constant]; +"512 ConstantOfShape_680" [id=512, type=ConstantOfShape]; +"513 Constant_681" [id=513, type=Constant]; +"514 ConstantOfShape_682" [id=514, type=ConstantOfShape]; +"515 NonMaxSuppression_683" [id=515, type=NonMaxSuppression]; +"516 Constant_684" [id=516, type=Constant]; +"517 Constant_685" [id=517, type=Constant]; +"518 Constant_686" [id=518, type=Constant]; +"519 Constant_687" [id=519, type=Constant]; +"520 Slice_688" [id=520, type=Slice]; +"521 Constant_689" [id=521, type=Constant]; +"522 Gather_690" [id=522, type=Gather]; +"523 Constant_691" [id=523, type=Constant]; +"524 Constant_692" [id=524, type=Constant]; +"525 Constant_693" [id=525, type=Constant]; +"526 Constant_694" [id=526, type=Constant]; +"527 Slice_695" [id=527, type=Slice]; +"528 Constant_696" [id=528, type=Constant]; +"529 Gather_697" [id=529, type=Gather]; +"530 Constant_698" [id=530, type=Constant]; +"531 Reshape_699" [id=531, type=Reshape]; +"532 Shape_700" [id=532, type=Shape]; +"533 
Constant_701" [id=533, type=Constant]; +"534 Gather_702" [id=534, type=Gather]; +"535 Mul_703" [id=535, type=Mul]; +"536 Add_704" [id=536, type=Add]; +"537 Cast_705" [id=537, type=Cast]; +"538 Gather_706" [id=538, type=Gather]; +"539 Shape_707" [id=539, type=Shape]; +"540 Constant_708" [id=540, type=Constant]; +"541 Gather_709" [id=541, type=Gather]; +"542 Constant_nncf_542" [id=542, type=Constant]; +"543 Unsqueeze_710" [id=543, type=Unsqueeze]; +"544 Constant_711" [id=544, type=Constant]; +"545 Concat_712" [id=545, type=Concat]; +"546 Cast_713" [id=546, type=Cast]; +"547 ReduceMin_714" [id=547, type=ReduceMin]; +"548 Cast_715" [id=548, type=Cast]; +"549 Constant_nncf_549" [id=549, type=Constant]; +"550 Unsqueeze_716" [id=550, type=Unsqueeze]; +"551 TopK_717" [id=551, type=TopK]; +"552 Constant_nncf_552" [id=552, type=Constant]; +"553 Squeeze_719" [id=553, type=Squeeze]; +"554 Gather_720" [id=554, type=Gather]; +"555 Constant_721" [id=555, type=Constant]; +"556 Constant_722" [id=556, type=Constant]; +"557 Constant_723" [id=557, type=Constant]; +"558 Constant_724" [id=558, type=Constant]; +"559 Slice_725" [id=559, type=Slice]; +"560 Cast_726" [id=560, type=Cast]; +"561 Gather_727" [id=561, type=Gather]; +"562 Constant_nncf_562" [id=562, type=Constant]; +"563 Unsqueeze_bboxes" [id=563, type=Unsqueeze]; +"564 Gather_729" [id=564, type=Gather]; +"565 Constant_nncf_565" [id=565, type=Constant]; +"566 Unsqueeze_730" [id=566, type=Unsqueeze]; +"567 Constant_731" [id=567, type=Constant]; +"568 Add_labels" [id=568, type=Add]; +"569 Gather_733" [id=569, type=Gather]; +"570 Constant_nncf_570" [id=570, type=Constant]; +"571 Unsqueeze_scores" [id=571, type=Unsqueeze]; +"572 nncf_model_input_0" [id=572, type=nncf_model_input]; +"573 nncf_model_output_0" [id=573, type=nncf_model_output]; +"574 nncf_model_output_1" [id=574, type=nncf_model_output]; +"575 nncf_model_output_2" [id=575, type=nncf_model_output]; "0 Conv_219" -> "1 BatchNormalization_220" [label="[1, 64, 600, 600]", style=solid]; "1 BatchNormalization_220" -> "2 Relu_221" [label="[1, 64, 600, 600]", style=solid]; "2 Relu_221" -> "3 MaxPool_222" [label="[1, 64, 600, 600]", style=solid]; @@ -632,459 +689,517 @@ strict digraph { "98 Relu_317" -> "99 Conv_318" [label="[1, 256, 150, 150]", style=solid]; "98 Relu_317" -> "119 Conv_338" [label="[1, 256, 150, 150]", style=solid]; "98 Relu_317" -> "121 Shape_340" [label="[1, 256, 150, 150]", style=solid]; -"98 Relu_317" -> "130 Conv_349" [label="[1, 256, 150, 150]", style=solid]; -"98 Relu_317" -> "132 Shape_351" [label="[1, 256, 150, 150]", style=solid]; +"98 Relu_317" -> "133 Conv_349" [label="[1, 256, 150, 150]", style=solid]; +"98 Relu_317" -> "135 Shape_351" [label="[1, 256, 150, 150]", style=solid]; "99 Conv_318" -> "100 Relu_319" [label="[1, 256, 150, 150]", style=solid]; "100 Relu_319" -> "101 Conv_320" [label="[1, 256, 150, 150]", style=solid]; "101 Conv_320" -> "102 Relu_321" [label="[1, 512, 75, 75]", style=solid]; "102 Relu_321" -> "103 Conv_322" [label="[1, 512, 75, 75]", style=solid]; -"102 Relu_321" -> "141 Conv_360" [label="[1, 512, 75, 75]", style=solid]; -"102 Relu_321" -> "143 Shape_362" [label="[1, 512, 75, 75]", style=solid]; -"102 Relu_321" -> "152 Conv_371" [label="[1, 512, 75, 75]", style=solid]; -"102 Relu_321" -> "154 Shape_373" [label="[1, 512, 75, 75]", style=solid]; +"102 Relu_321" -> "147 Conv_360" [label="[1, 512, 75, 75]", style=solid]; +"102 Relu_321" -> "149 Shape_362" [label="[1, 512, 75, 75]", style=solid]; +"102 Relu_321" -> "161 Conv_371" [label="[1, 512, 75, 75]", 
style=solid]; +"102 Relu_321" -> "163 Shape_373" [label="[1, 512, 75, 75]", style=solid]; "103 Conv_322" -> "104 Relu_323" [label="[1, 256, 75, 75]", style=solid]; "104 Relu_323" -> "105 Conv_324" [label="[1, 256, 75, 75]", style=solid]; "105 Conv_324" -> "106 Relu_325" [label="[1, 512, 38, 38]", style=solid]; "106 Relu_325" -> "107 Conv_326" [label="[1, 512, 38, 38]", style=solid]; -"106 Relu_325" -> "163 Conv_382" [label="[1, 512, 38, 38]", style=solid]; -"106 Relu_325" -> "165 Shape_384" [label="[1, 512, 38, 38]", style=solid]; -"106 Relu_325" -> "174 Conv_393" [label="[1, 512, 38, 38]", style=solid]; -"106 Relu_325" -> "176 Shape_395" [label="[1, 512, 38, 38]", style=solid]; +"106 Relu_325" -> "175 Conv_382" [label="[1, 512, 38, 38]", style=solid]; +"106 Relu_325" -> "177 Shape_384" [label="[1, 512, 38, 38]", style=solid]; +"106 Relu_325" -> "189 Conv_393" [label="[1, 512, 38, 38]", style=solid]; +"106 Relu_325" -> "191 Shape_395" [label="[1, 512, 38, 38]", style=solid]; "107 Conv_326" -> "108 Relu_327" [label="[1, 128, 38, 38]", style=solid]; "108 Relu_327" -> "109 Conv_328" [label="[1, 128, 38, 38]", style=solid]; "109 Conv_328" -> "110 Relu_329" [label="[1, 256, 19, 19]", style=solid]; "110 Relu_329" -> "111 Conv_330" [label="[1, 256, 19, 19]", style=solid]; -"110 Relu_329" -> "185 Conv_404" [label="[1, 256, 19, 19]", style=solid]; -"110 Relu_329" -> "187 Shape_406" [label="[1, 256, 19, 19]", style=solid]; -"110 Relu_329" -> "196 Conv_415" [label="[1, 256, 19, 19]", style=solid]; -"110 Relu_329" -> "198 Shape_417" [label="[1, 256, 19, 19]", style=solid]; +"110 Relu_329" -> "203 Conv_404" [label="[1, 256, 19, 19]", style=solid]; +"110 Relu_329" -> "205 Shape_406" [label="[1, 256, 19, 19]", style=solid]; +"110 Relu_329" -> "217 Conv_415" [label="[1, 256, 19, 19]", style=solid]; +"110 Relu_329" -> "219 Shape_417" [label="[1, 256, 19, 19]", style=solid]; "111 Conv_330" -> "112 Relu_331" [label="[1, 128, 19, 19]", style=solid]; "112 Relu_331" -> "113 Conv_332" [label="[1, 128, 19, 19]", style=solid]; "113 Conv_332" -> "114 Relu_333" [label="[1, 256, 9, 9]", style=solid]; "114 Relu_333" -> "115 Conv_334" [label="[1, 256, 9, 9]", style=solid]; -"114 Relu_333" -> "207 Conv_426" [label="[1, 256, 9, 9]", style=solid]; -"114 Relu_333" -> "209 Shape_428" [label="[1, 256, 9, 9]", style=solid]; -"114 Relu_333" -> "218 Conv_437" [label="[1, 256, 9, 9]", style=solid]; -"114 Relu_333" -> "220 Shape_439" [label="[1, 256, 9, 9]", style=solid]; +"114 Relu_333" -> "231 Conv_426" [label="[1, 256, 9, 9]", style=solid]; +"114 Relu_333" -> "233 Shape_428" [label="[1, 256, 9, 9]", style=solid]; +"114 Relu_333" -> "245 Conv_437" [label="[1, 256, 9, 9]", style=solid]; +"114 Relu_333" -> "247 Shape_439" [label="[1, 256, 9, 9]", style=solid]; "115 Conv_334" -> "116 Relu_335" [label="[1, 128, 9, 9]", style=solid]; "116 Relu_335" -> "117 Conv_336" [label="[1, 128, 9, 9]", style=solid]; "117 Conv_336" -> "118 Relu_337" [label="[1, 256, 7, 7]", style=solid]; -"118 Relu_337" -> "229 Conv_448" [label="[1, 256, 7, 7]", style=solid]; -"118 Relu_337" -> "231 Shape_450" [label="[1, 256, 7, 7]", style=solid]; -"118 Relu_337" -> "240 Conv_459" [label="[1, 256, 7, 7]", style=solid]; -"118 Relu_337" -> "242 Shape_461" [label="[1, 256, 7, 7]", style=solid]; -"119 Conv_338" -> "129 Reshape_348" [label="[1, 16, 50, 50]", style=solid]; +"118 Relu_337" -> "259 Conv_448" [label="[1, 256, 7, 7]", style=solid]; +"118 Relu_337" -> "261 Shape_450" [label="[1, 256, 7, 7]", style=solid]; +"118 Relu_337" -> "273 Conv_459" [label="[1, 256, 
7, 7]", style=solid]; +"118 Relu_337" -> "275 Shape_461" [label="[1, 256, 7, 7]", style=solid]; +"119 Conv_338" -> "132 Reshape_348" [label="[1, 16, 50, 50]", style=solid]; "120 Constant_339" -> "122 Gather_341" [label="[]", style=dashed]; "121 Shape_340" -> "122 Gather_341" [label="[4]", style=dashed]; -"122 Gather_341" -> "125 Unsqueeze_344" [label="[]", style=dashed]; -"123 Constant_342" -> "126 Unsqueeze_345" [label="[]", style=dashed]; -"124 Constant_343" -> "127 Unsqueeze_346" [label="[]", style=dashed]; -"125 Unsqueeze_344" -> "128 Concat_347" [label="[1]", style=dashed]; -"126 Unsqueeze_345" -> "128 Concat_347" [label="[1]", style=dashed]; -"127 Unsqueeze_346" -> "128 Concat_347" [label="[1]", style=dashed]; -"128 Concat_347" -> "129 Reshape_348" [label="[3]", style=dashed]; -"129 Reshape_348" -> "251 Concat_470" [label="[1]", style=solid]; -"130 Conv_349" -> "140 Reshape_359" [label="[1, 324, 50, 50]", style=solid]; -"131 Constant_350" -> "133 Gather_352" [label="[]", style=dashed]; -"132 Shape_351" -> "133 Gather_352" [label="[4]", style=dashed]; -"133 Gather_352" -> "136 Unsqueeze_355" [label="[]", style=dashed]; -"134 Constant_353" -> "137 Unsqueeze_356" [label="[]", style=dashed]; -"135 Constant_354" -> "138 Unsqueeze_357" [label="[]", style=dashed]; -"136 Unsqueeze_355" -> "139 Concat_358" [label="[1]", style=dashed]; -"137 Unsqueeze_356" -> "139 Concat_358" [label="[1]", style=dashed]; -"138 Unsqueeze_357" -> "139 Concat_358" [label="[1]", style=dashed]; -"139 Concat_358" -> "140 Reshape_359" [label="[3]", style=dashed]; -"140 Reshape_359" -> "252 Concat_471" [label="[1]", style=solid]; -"141 Conv_360" -> "151 Reshape_370" [label="[1, 24, 25, 25]", style=solid]; -"142 Constant_361" -> "144 Gather_363" [label="[]", style=dashed]; -"143 Shape_362" -> "144 Gather_363" [label="[4]", style=dashed]; -"144 Gather_363" -> "147 Unsqueeze_366" [label="[]", style=dashed]; -"145 Constant_364" -> "148 Unsqueeze_367" [label="[]", style=dashed]; -"146 Constant_365" -> "149 Unsqueeze_368" [label="[]", style=dashed]; -"147 Unsqueeze_366" -> "150 Concat_369" [label="[1]", style=dashed]; -"148 Unsqueeze_367" -> "150 Concat_369" [label="[1]", style=dashed]; -"149 Unsqueeze_368" -> "150 Concat_369" [label="[1]", style=dashed]; -"150 Concat_369" -> "151 Reshape_370" [label="[3]", style=dashed]; -"151 Reshape_370" -> "251 Concat_470" [label="[1]", style=solid]; -"152 Conv_371" -> "162 Reshape_381" [label="[1, 486, 25, 25]", style=solid]; -"153 Constant_372" -> "155 Gather_374" [label="[]", style=dashed]; -"154 Shape_373" -> "155 Gather_374" [label="[4]", style=dashed]; -"155 Gather_374" -> "158 Unsqueeze_377" [label="[]", style=dashed]; -"156 Constant_375" -> "159 Unsqueeze_378" [label="[]", style=dashed]; -"157 Constant_376" -> "160 Unsqueeze_379" [label="[]", style=dashed]; -"158 Unsqueeze_377" -> "161 Concat_380" [label="[1]", style=dashed]; -"159 Unsqueeze_378" -> "161 Concat_380" [label="[1]", style=dashed]; -"160 Unsqueeze_379" -> "161 Concat_380" [label="[1]", style=dashed]; -"161 Concat_380" -> "162 Reshape_381" [label="[3]", style=dashed]; -"162 Reshape_381" -> "252 Concat_471" [label="[1]", style=solid]; -"163 Conv_382" -> "173 Reshape_392" [label="[1, 24, 13, 13]", style=solid]; -"164 Constant_383" -> "166 Gather_385" [label="[]", style=dashed]; -"165 Shape_384" -> "166 Gather_385" [label="[4]", style=dashed]; -"166 Gather_385" -> "169 Unsqueeze_388" [label="[]", style=dashed]; -"167 Constant_386" -> "170 Unsqueeze_389" [label="[]", style=dashed]; -"168 Constant_387" -> "171 
Unsqueeze_390" [label="[]", style=dashed]; -"169 Unsqueeze_388" -> "172 Concat_391" [label="[1]", style=dashed]; -"170 Unsqueeze_389" -> "172 Concat_391" [label="[1]", style=dashed]; -"171 Unsqueeze_390" -> "172 Concat_391" [label="[1]", style=dashed]; -"172 Concat_391" -> "173 Reshape_392" [label="[3]", style=dashed]; -"173 Reshape_392" -> "251 Concat_470" [label="[1]", style=solid]; -"174 Conv_393" -> "184 Reshape_403" [label="[1, 486, 13, 13]", style=solid]; -"175 Constant_394" -> "177 Gather_396" [label="[]", style=dashed]; -"176 Shape_395" -> "177 Gather_396" [label="[4]", style=dashed]; -"177 Gather_396" -> "180 Unsqueeze_399" [label="[]", style=dashed]; -"178 Constant_397" -> "181 Unsqueeze_400" [label="[]", style=dashed]; -"179 Constant_398" -> "182 Unsqueeze_401" [label="[]", style=dashed]; -"180 Unsqueeze_399" -> "183 Concat_402" [label="[1]", style=dashed]; -"181 Unsqueeze_400" -> "183 Concat_402" [label="[1]", style=dashed]; -"182 Unsqueeze_401" -> "183 Concat_402" [label="[1]", style=dashed]; -"183 Concat_402" -> "184 Reshape_403" [label="[3]", style=dashed]; -"184 Reshape_403" -> "252 Concat_471" [label="[1]", style=solid]; -"185 Conv_404" -> "195 Reshape_414" [label="[1, 24, 7, 7]", style=solid]; -"186 Constant_405" -> "188 Gather_407" [label="[]", style=dashed]; -"187 Shape_406" -> "188 Gather_407" [label="[4]", style=dashed]; -"188 Gather_407" -> "191 Unsqueeze_410" [label="[]", style=dashed]; -"189 Constant_408" -> "192 Unsqueeze_411" [label="[]", style=dashed]; -"190 Constant_409" -> "193 Unsqueeze_412" [label="[]", style=dashed]; -"191 Unsqueeze_410" -> "194 Concat_413" [label="[1]", style=dashed]; -"192 Unsqueeze_411" -> "194 Concat_413" [label="[1]", style=dashed]; -"193 Unsqueeze_412" -> "194 Concat_413" [label="[1]", style=dashed]; -"194 Concat_413" -> "195 Reshape_414" [label="[3]", style=dashed]; -"195 Reshape_414" -> "251 Concat_470" [label="[1]", style=solid]; -"196 Conv_415" -> "206 Reshape_425" [label="[1, 486, 7, 7]", style=solid]; -"197 Constant_416" -> "199 Gather_418" [label="[]", style=dashed]; -"198 Shape_417" -> "199 Gather_418" [label="[4]", style=dashed]; -"199 Gather_418" -> "202 Unsqueeze_421" [label="[]", style=dashed]; -"200 Constant_419" -> "203 Unsqueeze_422" [label="[]", style=dashed]; -"201 Constant_420" -> "204 Unsqueeze_423" [label="[]", style=dashed]; -"202 Unsqueeze_421" -> "205 Concat_424" [label="[1]", style=dashed]; -"203 Unsqueeze_422" -> "205 Concat_424" [label="[1]", style=dashed]; -"204 Unsqueeze_423" -> "205 Concat_424" [label="[1]", style=dashed]; -"205 Concat_424" -> "206 Reshape_425" [label="[3]", style=dashed]; -"206 Reshape_425" -> "252 Concat_471" [label="[1]", style=solid]; -"207 Conv_426" -> "217 Reshape_436" [label="[1, 16, 3, 3]", style=solid]; -"208 Constant_427" -> "210 Gather_429" [label="[]", style=dashed]; -"209 Shape_428" -> "210 Gather_429" [label="[4]", style=dashed]; -"210 Gather_429" -> "213 Unsqueeze_432" [label="[]", style=dashed]; -"211 Constant_430" -> "214 Unsqueeze_433" [label="[]", style=dashed]; -"212 Constant_431" -> "215 Unsqueeze_434" [label="[]", style=dashed]; -"213 Unsqueeze_432" -> "216 Concat_435" [label="[1]", style=dashed]; -"214 Unsqueeze_433" -> "216 Concat_435" [label="[1]", style=dashed]; -"215 Unsqueeze_434" -> "216 Concat_435" [label="[1]", style=dashed]; -"216 Concat_435" -> "217 Reshape_436" [label="[3]", style=dashed]; -"217 Reshape_436" -> "251 Concat_470" [label="[1]", style=solid]; -"218 Conv_437" -> "228 Reshape_447" [label="[1, 324, 3, 3]", style=solid]; -"219 Constant_438" -> 
"221 Gather_440" [label="[]", style=dashed]; -"220 Shape_439" -> "221 Gather_440" [label="[4]", style=dashed]; -"221 Gather_440" -> "224 Unsqueeze_443" [label="[]", style=dashed]; -"222 Constant_441" -> "225 Unsqueeze_444" [label="[]", style=dashed]; -"223 Constant_442" -> "226 Unsqueeze_445" [label="[]", style=dashed]; -"224 Unsqueeze_443" -> "227 Concat_446" [label="[1]", style=dashed]; -"225 Unsqueeze_444" -> "227 Concat_446" [label="[1]", style=dashed]; -"226 Unsqueeze_445" -> "227 Concat_446" [label="[1]", style=dashed]; -"227 Concat_446" -> "228 Reshape_447" [label="[3]", style=dashed]; -"228 Reshape_447" -> "252 Concat_471" [label="[1]", style=solid]; -"229 Conv_448" -> "239 Reshape_458" [label="[1, 16, 3, 3]", style=solid]; -"230 Constant_449" -> "232 Gather_451" [label="[]", style=dashed]; -"231 Shape_450" -> "232 Gather_451" [label="[4]", style=dashed]; -"232 Gather_451" -> "235 Unsqueeze_454" [label="[]", style=dashed]; -"233 Constant_452" -> "236 Unsqueeze_455" [label="[]", style=dashed]; -"234 Constant_453" -> "237 Unsqueeze_456" [label="[]", style=dashed]; -"235 Unsqueeze_454" -> "238 Concat_457" [label="[1]", style=dashed]; -"236 Unsqueeze_455" -> "238 Concat_457" [label="[1]", style=dashed]; -"237 Unsqueeze_456" -> "238 Concat_457" [label="[1]", style=dashed]; -"238 Concat_457" -> "239 Reshape_458" [label="[3]", style=dashed]; -"239 Reshape_458" -> "251 Concat_470" [label="[1]", style=solid]; -"240 Conv_459" -> "250 Reshape_469" [label="[1, 324, 3, 3]", style=solid]; -"241 Constant_460" -> "243 Gather_462" [label="[]", style=dashed]; -"242 Shape_461" -> "243 Gather_462" [label="[4]", style=dashed]; -"243 Gather_462" -> "246 Unsqueeze_465" [label="[]", style=dashed]; -"244 Constant_463" -> "247 Unsqueeze_466" [label="[]", style=dashed]; -"245 Constant_464" -> "248 Unsqueeze_467" [label="[]", style=dashed]; -"246 Unsqueeze_465" -> "249 Concat_468" [label="[1]", style=dashed]; -"247 Unsqueeze_466" -> "249 Concat_468" [label="[1]", style=dashed]; -"248 Unsqueeze_467" -> "249 Concat_468" [label="[1]", style=dashed]; -"249 Concat_468" -> "250 Reshape_469" [label="[3]", style=dashed]; -"250 Reshape_469" -> "252 Concat_471" [label="[1]", style=solid]; -"251 Concat_470" -> "253 Transpose_472" [label="[1]", style=solid]; -"252 Concat_471" -> "254 Transpose_473" [label="[1]", style=solid]; -"253 Transpose_472" -> "259 Slice_478" [label="[1]", style=solid]; -"253 Transpose_472" -> "276 Slice_495" [label="[1]", style=solid]; -"254 Transpose_473" -> "441 Softmax_660" [label="[1]", style=solid]; -"255 Constant_474" -> "259 Slice_478" [label="[1]", style=dashed]; -"256 Constant_475" -> "259 Slice_478" [label="[1]", style=dashed]; -"257 Constant_476" -> "259 Slice_478" [label="[1]", style=dashed]; -"258 Constant_477" -> "259 Slice_478" [label="[1]", style=dashed]; -"259 Slice_478" -> "264 Slice_483" [label="[1]", style=solid]; -"260 Constant_479" -> "264 Slice_483" [label="[1]", style=dashed]; -"261 Constant_480" -> "264 Slice_483" [label="[1]", style=dashed]; -"262 Constant_481" -> "264 Slice_483" [label="[1]", style=dashed]; -"263 Constant_482" -> "264 Slice_483" [label="[1]", style=dashed]; -"264 Slice_483" -> "269 Slice_488" [label="[1]", style=solid]; -"265 Constant_484" -> "269 Slice_488" [label="[1]", style=dashed]; -"266 Constant_485" -> "269 Slice_488" [label="[1]", style=dashed]; -"267 Constant_486" -> "269 Slice_488" [label="[1]", style=dashed]; -"268 Constant_487" -> "269 Slice_488" [label="[1]", style=dashed]; -"269 Slice_488" -> "271 Mul_490" [label="[1]", style=solid]; -"270 
Constant_489" -> "271 Mul_490" [label="[]", style=solid]; -"271 Mul_490" -> "290 Mul_509" [label="[1]", style=solid]; -"272 Constant_491" -> "276 Slice_495" [label="[1]", style=dashed]; -"273 Constant_492" -> "276 Slice_495" [label="[1]", style=dashed]; -"274 Constant_493" -> "276 Slice_495" [label="[1]", style=dashed]; -"275 Constant_494" -> "276 Slice_495" [label="[1]", style=dashed]; -"276 Slice_495" -> "281 Slice_500" [label="[1]", style=solid]; -"277 Constant_496" -> "281 Slice_500" [label="[1]", style=dashed]; -"278 Constant_497" -> "281 Slice_500" [label="[1]", style=dashed]; -"279 Constant_498" -> "281 Slice_500" [label="[1]", style=dashed]; -"280 Constant_499" -> "281 Slice_500" [label="[1]", style=dashed]; -"281 Slice_500" -> "286 Slice_505" [label="[1]", style=solid]; -"282 Constant_501" -> "286 Slice_505" [label="[1]", style=dashed]; -"283 Constant_502" -> "286 Slice_505" [label="[1]", style=dashed]; -"284 Constant_503" -> "286 Slice_505" [label="[1]", style=dashed]; -"285 Constant_504" -> "286 Slice_505" [label="[1]", style=dashed]; -"286 Slice_505" -> "288 Mul_507" [label="[1]", style=solid]; -"287 Constant_506" -> "288 Mul_507" [label="[]", style=solid]; -"288 Mul_507" -> "293 Exp_512" [label="[1]", style=solid]; -"289 Constant_508" -> "290 Mul_509" [label="[1, 15130, 2]", style=solid]; -"290 Mul_509" -> "292 Add_511" [label="[1]", style=solid]; -"291 Constant_510" -> "292 Add_511" [label="[1, 15130, 2]", style=solid]; -"292 Add_511" -> "300 Slice_519" [label="[1]", style=solid]; -"292 Add_511" -> "335 Slice_554" [label="[1]", style=solid]; -"292 Add_511" -> "370 Slice_589" [label="[1]", style=solid]; -"292 Add_511" -> "405 Slice_624" [label="[1]", style=solid]; -"293 Exp_512" -> "295 Mul_514" [label="[1]", style=solid]; -"294 Constant_513" -> "295 Mul_514" [label="[1, 15130, 2]", style=solid]; -"295 Mul_514" -> "316 Slice_535" [label="[1]", style=solid]; -"295 Mul_514" -> "351 Slice_570" [label="[1]", style=solid]; -"295 Mul_514" -> "386 Slice_605" [label="[1]", style=solid]; -"295 Mul_514" -> "421 Slice_640" [label="[1]", style=solid]; -"296 Constant_515" -> "300 Slice_519" [label="[1]", style=dashed]; -"297 Constant_516" -> "300 Slice_519" [label="[1]", style=dashed]; -"298 Constant_517" -> "300 Slice_519" [label="[1]", style=dashed]; -"299 Constant_518" -> "300 Slice_519" [label="[1]", style=dashed]; -"300 Slice_519" -> "305 Slice_524" [label="[1]", style=solid]; -"301 Constant_520" -> "305 Slice_524" [label="[1]", style=dashed]; -"302 Constant_521" -> "305 Slice_524" [label="[1]", style=dashed]; -"303 Constant_522" -> "305 Slice_524" [label="[1]", style=dashed]; -"304 Constant_523" -> "305 Slice_524" [label="[1]", style=dashed]; -"305 Slice_524" -> "310 Slice_529" [label="[1]", style=solid]; -"306 Constant_525" -> "310 Slice_529" [label="[1]", style=dashed]; -"307 Constant_526" -> "310 Slice_529" [label="[1]", style=dashed]; -"308 Constant_527" -> "310 Slice_529" [label="[1]", style=dashed]; -"309 Constant_528" -> "310 Slice_529" [label="[1]", style=dashed]; -"310 Slice_529" -> "311 Squeeze_530" [label="[1]", style=solid]; -"311 Squeeze_530" -> "330 Sub_549" [label="[1]", style=solid]; -"312 Constant_531" -> "316 Slice_535" [label="[1]", style=dashed]; -"313 Constant_532" -> "316 Slice_535" [label="[1]", style=dashed]; -"314 Constant_533" -> "316 Slice_535" [label="[1]", style=dashed]; -"315 Constant_534" -> "316 Slice_535" [label="[1]", style=dashed]; -"316 Slice_535" -> "321 Slice_540" [label="[1]", style=solid]; -"317 Constant_536" -> "321 Slice_540" [label="[1]", 
style=dashed]; -"318 Constant_537" -> "321 Slice_540" [label="[1]", style=dashed]; -"319 Constant_538" -> "321 Slice_540" [label="[1]", style=dashed]; -"320 Constant_539" -> "321 Slice_540" [label="[1]", style=dashed]; -"321 Slice_540" -> "326 Slice_545" [label="[1]", style=solid]; -"322 Constant_541" -> "326 Slice_545" [label="[1]", style=dashed]; -"323 Constant_542" -> "326 Slice_545" [label="[1]", style=dashed]; -"324 Constant_543" -> "326 Slice_545" [label="[1]", style=dashed]; -"325 Constant_544" -> "326 Slice_545" [label="[1]", style=dashed]; -"326 Slice_545" -> "327 Squeeze_546" [label="[1]", style=solid]; -"327 Squeeze_546" -> "329 Mul_548" [label="[1]", style=solid]; -"328 Constant_547" -> "329 Mul_548" [label="[]", style=solid]; -"329 Mul_548" -> "330 Sub_549" [label="[1]", style=solid]; -"330 Sub_549" -> "436 Unsqueeze_655" [label="[1]", style=solid]; -"331 Constant_550" -> "335 Slice_554" [label="[1]", style=dashed]; -"332 Constant_551" -> "335 Slice_554" [label="[1]", style=dashed]; -"333 Constant_552" -> "335 Slice_554" [label="[1]", style=dashed]; -"334 Constant_553" -> "335 Slice_554" [label="[1]", style=dashed]; -"335 Slice_554" -> "340 Slice_559" [label="[1]", style=solid]; -"336 Constant_555" -> "340 Slice_559" [label="[1]", style=dashed]; -"337 Constant_556" -> "340 Slice_559" [label="[1]", style=dashed]; -"338 Constant_557" -> "340 Slice_559" [label="[1]", style=dashed]; -"339 Constant_558" -> "340 Slice_559" [label="[1]", style=dashed]; -"340 Slice_559" -> "345 Slice_564" [label="[1]", style=solid]; -"341 Constant_560" -> "345 Slice_564" [label="[1]", style=dashed]; -"342 Constant_561" -> "345 Slice_564" [label="[1]", style=dashed]; -"343 Constant_562" -> "345 Slice_564" [label="[1]", style=dashed]; -"344 Constant_563" -> "345 Slice_564" [label="[1]", style=dashed]; -"345 Slice_564" -> "346 Squeeze_565" [label="[1]", style=solid]; -"346 Squeeze_565" -> "365 Sub_584" [label="[1]", style=solid]; -"347 Constant_566" -> "351 Slice_570" [label="[1]", style=dashed]; -"348 Constant_567" -> "351 Slice_570" [label="[1]", style=dashed]; -"349 Constant_568" -> "351 Slice_570" [label="[1]", style=dashed]; -"350 Constant_569" -> "351 Slice_570" [label="[1]", style=dashed]; -"351 Slice_570" -> "356 Slice_575" [label="[1]", style=solid]; -"352 Constant_571" -> "356 Slice_575" [label="[1]", style=dashed]; -"353 Constant_572" -> "356 Slice_575" [label="[1]", style=dashed]; -"354 Constant_573" -> "356 Slice_575" [label="[1]", style=dashed]; -"355 Constant_574" -> "356 Slice_575" [label="[1]", style=dashed]; -"356 Slice_575" -> "361 Slice_580" [label="[1]", style=solid]; -"357 Constant_576" -> "361 Slice_580" [label="[1]", style=dashed]; -"358 Constant_577" -> "361 Slice_580" [label="[1]", style=dashed]; -"359 Constant_578" -> "361 Slice_580" [label="[1]", style=dashed]; -"360 Constant_579" -> "361 Slice_580" [label="[1]", style=dashed]; -"361 Slice_580" -> "362 Squeeze_581" [label="[1]", style=solid]; -"362 Squeeze_581" -> "364 Mul_583" [label="[1]", style=solid]; -"363 Constant_582" -> "364 Mul_583" [label="[]", style=solid]; -"364 Mul_583" -> "365 Sub_584" [label="[1]", style=solid]; -"365 Sub_584" -> "437 Unsqueeze_656" [label="[1]", style=solid]; -"366 Constant_585" -> "370 Slice_589" [label="[1]", style=dashed]; -"367 Constant_586" -> "370 Slice_589" [label="[1]", style=dashed]; -"368 Constant_587" -> "370 Slice_589" [label="[1]", style=dashed]; -"369 Constant_588" -> "370 Slice_589" [label="[1]", style=dashed]; -"370 Slice_589" -> "375 Slice_594" [label="[1]", style=solid]; -"371 
Constant_590" -> "375 Slice_594" [label="[1]", style=dashed]; -"372 Constant_591" -> "375 Slice_594" [label="[1]", style=dashed]; -"373 Constant_592" -> "375 Slice_594" [label="[1]", style=dashed]; -"374 Constant_593" -> "375 Slice_594" [label="[1]", style=dashed]; -"375 Slice_594" -> "380 Slice_599" [label="[1]", style=solid]; -"376 Constant_595" -> "380 Slice_599" [label="[1]", style=dashed]; -"377 Constant_596" -> "380 Slice_599" [label="[1]", style=dashed]; -"378 Constant_597" -> "380 Slice_599" [label="[1]", style=dashed]; -"379 Constant_598" -> "380 Slice_599" [label="[1]", style=dashed]; -"380 Slice_599" -> "381 Squeeze_600" [label="[1]", style=solid]; -"381 Squeeze_600" -> "400 Add_619" [label="[1]", style=solid]; -"382 Constant_601" -> "386 Slice_605" [label="[1]", style=dashed]; -"383 Constant_602" -> "386 Slice_605" [label="[1]", style=dashed]; -"384 Constant_603" -> "386 Slice_605" [label="[1]", style=dashed]; -"385 Constant_604" -> "386 Slice_605" [label="[1]", style=dashed]; -"386 Slice_605" -> "391 Slice_610" [label="[1]", style=solid]; -"387 Constant_606" -> "391 Slice_610" [label="[1]", style=dashed]; -"388 Constant_607" -> "391 Slice_610" [label="[1]", style=dashed]; -"389 Constant_608" -> "391 Slice_610" [label="[1]", style=dashed]; -"390 Constant_609" -> "391 Slice_610" [label="[1]", style=dashed]; -"391 Slice_610" -> "396 Slice_615" [label="[1]", style=solid]; -"392 Constant_611" -> "396 Slice_615" [label="[1]", style=dashed]; -"393 Constant_612" -> "396 Slice_615" [label="[1]", style=dashed]; -"394 Constant_613" -> "396 Slice_615" [label="[1]", style=dashed]; -"395 Constant_614" -> "396 Slice_615" [label="[1]", style=dashed]; -"396 Slice_615" -> "397 Squeeze_616" [label="[1]", style=solid]; -"397 Squeeze_616" -> "399 Mul_618" [label="[1]", style=solid]; -"398 Constant_617" -> "399 Mul_618" [label="[]", style=solid]; -"399 Mul_618" -> "400 Add_619" [label="[1]", style=solid]; -"400 Add_619" -> "438 Unsqueeze_657" [label="[1]", style=solid]; -"401 Constant_620" -> "405 Slice_624" [label="[1]", style=dashed]; -"402 Constant_621" -> "405 Slice_624" [label="[1]", style=dashed]; -"403 Constant_622" -> "405 Slice_624" [label="[1]", style=dashed]; -"404 Constant_623" -> "405 Slice_624" [label="[1]", style=dashed]; -"405 Slice_624" -> "410 Slice_629" [label="[1]", style=solid]; -"406 Constant_625" -> "410 Slice_629" [label="[1]", style=dashed]; -"407 Constant_626" -> "410 Slice_629" [label="[1]", style=dashed]; -"408 Constant_627" -> "410 Slice_629" [label="[1]", style=dashed]; -"409 Constant_628" -> "410 Slice_629" [label="[1]", style=dashed]; -"410 Slice_629" -> "415 Slice_634" [label="[1]", style=solid]; -"411 Constant_630" -> "415 Slice_634" [label="[1]", style=dashed]; -"412 Constant_631" -> "415 Slice_634" [label="[1]", style=dashed]; -"413 Constant_632" -> "415 Slice_634" [label="[1]", style=dashed]; -"414 Constant_633" -> "415 Slice_634" [label="[1]", style=dashed]; -"415 Slice_634" -> "416 Squeeze_635" [label="[1]", style=solid]; -"416 Squeeze_635" -> "435 Add_654" [label="[1]", style=solid]; -"417 Constant_636" -> "421 Slice_640" [label="[1]", style=dashed]; -"418 Constant_637" -> "421 Slice_640" [label="[1]", style=dashed]; -"419 Constant_638" -> "421 Slice_640" [label="[1]", style=dashed]; -"420 Constant_639" -> "421 Slice_640" [label="[1]", style=dashed]; -"421 Slice_640" -> "426 Slice_645" [label="[1]", style=solid]; -"422 Constant_641" -> "426 Slice_645" [label="[1]", style=dashed]; -"423 Constant_642" -> "426 Slice_645" [label="[1]", style=dashed]; -"424 
Constant_643" -> "426 Slice_645" [label="[1]", style=dashed]; -"425 Constant_644" -> "426 Slice_645" [label="[1]", style=dashed]; -"426 Slice_645" -> "431 Slice_650" [label="[1]", style=solid]; -"427 Constant_646" -> "431 Slice_650" [label="[1]", style=dashed]; -"428 Constant_647" -> "431 Slice_650" [label="[1]", style=dashed]; -"429 Constant_648" -> "431 Slice_650" [label="[1]", style=dashed]; -"430 Constant_649" -> "431 Slice_650" [label="[1]", style=dashed]; -"431 Slice_650" -> "432 Squeeze_651" [label="[1]", style=solid]; -"432 Squeeze_651" -> "434 Mul_653" [label="[1]", style=solid]; -"433 Constant_652" -> "434 Mul_653" [label="[]", style=solid]; -"434 Mul_653" -> "435 Add_654" [label="[1]", style=solid]; -"435 Add_654" -> "439 Unsqueeze_658" [label="[1]", style=solid]; -"436 Unsqueeze_655" -> "440 Concat_659" [label="[1]", style=solid]; -"437 Unsqueeze_656" -> "440 Concat_659" [label="[1]", style=solid]; -"438 Unsqueeze_657" -> "440 Concat_659" [label="[1]", style=solid]; -"439 Unsqueeze_658" -> "440 Concat_659" [label="[1]", style=solid]; -"440 Concat_659" -> "464 NonMaxSuppression_683" [label="[1]", style=solid]; -"440 Concat_659" -> "499 Squeeze_719" [label="[1]", style=solid]; -"441 Softmax_660" -> "442 Transpose_661" [label="[1]", style=solid]; -"442 Transpose_661" -> "447 Slice_666" [label="[1]", style=solid]; -"443 Constant_662" -> "447 Slice_666" [label="[1]", style=dashed]; -"444 Constant_663" -> "447 Slice_666" [label="[1]", style=dashed]; -"445 Constant_664" -> "447 Slice_666" [label="[1]", style=dashed]; -"446 Constant_665" -> "447 Slice_666" [label="[1]", style=dashed]; -"447 Slice_666" -> "452 Slice_671" [label="[1]", style=solid]; -"448 Constant_667" -> "452 Slice_671" [label="[1]", style=dashed]; -"449 Constant_668" -> "452 Slice_671" [label="[1]", style=dashed]; -"450 Constant_669" -> "452 Slice_671" [label="[1]", style=dashed]; -"451 Constant_670" -> "452 Slice_671" [label="[1]", style=dashed]; -"452 Slice_671" -> "457 Slice_676" [label="[1]", style=solid]; -"453 Constant_672" -> "457 Slice_676" [label="[1]", style=dashed]; -"454 Constant_673" -> "457 Slice_676" [label="[1]", style=dashed]; -"455 Constant_674" -> "457 Slice_676" [label="[1]", style=dashed]; -"456 Constant_675" -> "457 Slice_676" [label="[1]", style=dashed]; -"457 Slice_676" -> "464 NonMaxSuppression_683" [label="[1]", style=solid]; -"457 Slice_676" -> "480 Reshape_699" [label="[1]", style=solid]; -"457 Slice_676" -> "481 Shape_700" [label="[1]", style=solid]; -"458 Constant_677" -> "459 ConstantOfShape_678" [label="[1]", style=dashed]; -"459 ConstantOfShape_678" -> "464 NonMaxSuppression_683" [label="[1]", style=dashed]; -"460 Constant_679" -> "461 ConstantOfShape_680" [label="[1]", style=dashed]; -"461 ConstantOfShape_680" -> "464 NonMaxSuppression_683" [label="[1]", style=solid]; -"462 Constant_681" -> "463 ConstantOfShape_682" [label="[1]", style=dashed]; -"463 ConstantOfShape_682" -> "464 NonMaxSuppression_683" [label="[1]", style=solid]; -"464 NonMaxSuppression_683" -> "469 Slice_688" [label="[1]", style=dashed]; -"464 NonMaxSuppression_683" -> "476 Slice_695" [label="[1]", style=dashed]; -"465 Constant_684" -> "469 Slice_688" [label="[1]", style=dashed]; -"466 Constant_685" -> "469 Slice_688" [label="[1]", style=dashed]; -"467 Constant_686" -> "469 Slice_688" [label="[1]", style=dashed]; -"468 Constant_687" -> "469 Slice_688" [label="[1]", style=dashed]; -"469 Slice_688" -> "471 Gather_690" [label="[1]", style=dashed]; -"470 Constant_689" -> "471 Gather_690" [label="[]", style=dashed]; -"471 
Gather_690" -> "484 Mul_703" [label="[1]", style=dashed]; -"471 Gather_690" -> "509 Gather_729" [label="[1]", style=dashed]; -"472 Constant_691" -> "476 Slice_695" [label="[1]", style=dashed]; -"473 Constant_692" -> "476 Slice_695" [label="[1]", style=dashed]; -"474 Constant_693" -> "476 Slice_695" [label="[1]", style=dashed]; -"475 Constant_694" -> "476 Slice_695" [label="[1]", style=dashed]; -"476 Slice_695" -> "478 Gather_697" [label="[1]", style=dashed]; -"477 Constant_696" -> "478 Gather_697" [label="[]", style=dashed]; -"478 Gather_697" -> "485 Add_704" [label="[1]", style=dashed]; -"478 Gather_697" -> "500 Gather_720" [label="[1]", style=dashed]; -"479 Constant_698" -> "480 Reshape_699" [label="[1]", style=dashed]; -"480 Reshape_699" -> "487 Gather_706" [label="[1]", style=solid]; -"481 Shape_700" -> "483 Gather_702" [label="[1]", style=dashed]; -"482 Constant_701" -> "483 Gather_702" [label="[]", style=dashed]; -"483 Gather_702" -> "484 Mul_703" [label="[]", style=dashed]; -"484 Mul_703" -> "485 Add_704" [label="[1]", style=dashed]; -"485 Add_704" -> "486 Cast_705" [label="[1]", style=dashed]; -"486 Cast_705" -> "487 Gather_706" [label="[1]", style=dashed]; -"487 Gather_706" -> "488 Shape_707" [label="[1]", style=solid]; -"487 Gather_706" -> "498 TopK_717" [label="[1]", style=solid]; -"487 Gather_706" -> "513 Gather_733" [label="[1]", style=solid]; -"488 Shape_707" -> "490 Gather_709" [label="[1]", style=dashed]; -"489 Constant_708" -> "490 Gather_709" [label="[]", style=dashed]; -"490 Gather_709" -> "491 Unsqueeze_710" [label="[]", style=dashed]; -"491 Unsqueeze_710" -> "493 Concat_712" [label="[1]", style=dashed]; -"492 Constant_711" -> "493 Concat_712" [label="[1]", style=dashed]; -"493 Concat_712" -> "494 Cast_713" [label="[2]", style=dashed]; -"494 Cast_713" -> "495 ReduceMin_714" [label="[2]", style=dashed]; -"495 ReduceMin_714" -> "496 Cast_715" [label="[]", style=dashed]; -"496 Cast_715" -> "497 Unsqueeze_716" [label="[]", style=dashed]; -"497 Unsqueeze_716" -> "498 TopK_717" [label="[1]", style=dashed]; -"498 TopK_717" -> "500 Gather_720" [label="[1]", style=dashed]; -"498 TopK_717" -> "509 Gather_729" [label="[1]", style=dashed]; -"498 TopK_717" -> "513 Gather_733" [label="[1]", style=dashed]; -"499 Squeeze_719" -> "505 Slice_725" [label="[1]", style=solid]; -"500 Gather_720" -> "506 Cast_726" [label="[1]", style=dashed]; -"501 Constant_721" -> "505 Slice_725" [label="[1]", style=dashed]; -"502 Constant_722" -> "505 Slice_725" [label="[1]", style=dashed]; -"503 Constant_723" -> "505 Slice_725" [label="[1]", style=dashed]; -"504 Constant_724" -> "505 Slice_725" [label="[1]", style=dashed]; -"505 Slice_725" -> "507 Gather_727" [label="[1]", style=solid]; -"506 Cast_726" -> "507 Gather_727" [label="[1]", style=dashed]; -"507 Gather_727" -> "508 Unsqueeze_bboxes" [label="[1]", style=solid]; -"508 Unsqueeze_bboxes" -> "516 nncf_model_output_0" [label="[1, 1, 4]", style=solid]; -"509 Gather_729" -> "510 Unsqueeze_730" [label="[1]", style=dashed]; -"510 Unsqueeze_730" -> "512 Add_labels" [label="[1]", style=dashed]; -"511 Constant_731" -> "512 Add_labels" [label="[]", style=dashed]; -"512 Add_labels" -> "517 nncf_model_output_1" [label="[1, 1]", style=dashed]; -"513 Gather_733" -> "514 Unsqueeze_scores" [label="[1]", style=solid]; -"514 Unsqueeze_scores" -> "518 nncf_model_output_2" [label="[1, 1]", style=solid]; -"515 nncf_model_input_0" -> "0 Conv_219" [label="[1, 3, 1200, 1200]", style=solid]; +"122 Gather_341" -> "126 Unsqueeze_344" [label="[]", style=dashed]; +"123 
Constant_342" -> "128 Unsqueeze_345" [label="[]", style=dashed]; +"124 Constant_343" -> "130 Unsqueeze_346" [label="[]", style=dashed]; +"125 Constant_nncf_125" -> "126 Unsqueeze_344" [label="[1]", style=dashed]; +"126 Unsqueeze_344" -> "131 Concat_347" [label="[1]", style=dashed]; +"127 Constant_nncf_127" -> "128 Unsqueeze_345" [label="[1]", style=dashed]; +"128 Unsqueeze_345" -> "131 Concat_347" [label="[1]", style=dashed]; +"129 Constant_nncf_129" -> "130 Unsqueeze_346" [label="[1]", style=dashed]; +"130 Unsqueeze_346" -> "131 Concat_347" [label="[1]", style=dashed]; +"131 Concat_347" -> "132 Reshape_348" [label="[3]", style=dashed]; +"132 Reshape_348" -> "287 Concat_470" [label="[]", style=solid]; +"133 Conv_349" -> "146 Reshape_359" [label="[1, 324, 50, 50]", style=solid]; +"134 Constant_350" -> "136 Gather_352" [label="[]", style=dashed]; +"135 Shape_351" -> "136 Gather_352" [label="[4]", style=dashed]; +"136 Gather_352" -> "140 Unsqueeze_355" [label="[]", style=dashed]; +"137 Constant_353" -> "142 Unsqueeze_356" [label="[]", style=dashed]; +"138 Constant_354" -> "144 Unsqueeze_357" [label="[]", style=dashed]; +"139 Constant_nncf_139" -> "140 Unsqueeze_355" [label="[1]", style=dashed]; +"140 Unsqueeze_355" -> "145 Concat_358" [label="[1]", style=dashed]; +"141 Constant_nncf_141" -> "142 Unsqueeze_356" [label="[1]", style=dashed]; +"142 Unsqueeze_356" -> "145 Concat_358" [label="[1]", style=dashed]; +"143 Constant_nncf_143" -> "144 Unsqueeze_357" [label="[1]", style=dashed]; +"144 Unsqueeze_357" -> "145 Concat_358" [label="[1]", style=dashed]; +"145 Concat_358" -> "146 Reshape_359" [label="[3]", style=dashed]; +"146 Reshape_359" -> "288 Concat_471" [label="[]", style=solid]; +"147 Conv_360" -> "160 Reshape_370" [label="[1, 24, 25, 25]", style=solid]; +"148 Constant_361" -> "150 Gather_363" [label="[]", style=dashed]; +"149 Shape_362" -> "150 Gather_363" [label="[4]", style=dashed]; +"150 Gather_363" -> "154 Unsqueeze_366" [label="[]", style=dashed]; +"151 Constant_364" -> "156 Unsqueeze_367" [label="[]", style=dashed]; +"152 Constant_365" -> "158 Unsqueeze_368" [label="[]", style=dashed]; +"153 Constant_nncf_153" -> "154 Unsqueeze_366" [label="[1]", style=dashed]; +"154 Unsqueeze_366" -> "159 Concat_369" [label="[1]", style=dashed]; +"155 Constant_nncf_155" -> "156 Unsqueeze_367" [label="[1]", style=dashed]; +"156 Unsqueeze_367" -> "159 Concat_369" [label="[1]", style=dashed]; +"157 Constant_nncf_157" -> "158 Unsqueeze_368" [label="[1]", style=dashed]; +"158 Unsqueeze_368" -> "159 Concat_369" [label="[1]", style=dashed]; +"159 Concat_369" -> "160 Reshape_370" [label="[3]", style=dashed]; +"160 Reshape_370" -> "287 Concat_470" [label="[]", style=solid]; +"161 Conv_371" -> "174 Reshape_381" [label="[1, 486, 25, 25]", style=solid]; +"162 Constant_372" -> "164 Gather_374" [label="[]", style=dashed]; +"163 Shape_373" -> "164 Gather_374" [label="[4]", style=dashed]; +"164 Gather_374" -> "168 Unsqueeze_377" [label="[]", style=dashed]; +"165 Constant_375" -> "170 Unsqueeze_378" [label="[]", style=dashed]; +"166 Constant_376" -> "172 Unsqueeze_379" [label="[]", style=dashed]; +"167 Constant_nncf_167" -> "168 Unsqueeze_377" [label="[1]", style=dashed]; +"168 Unsqueeze_377" -> "173 Concat_380" [label="[1]", style=dashed]; +"169 Constant_nncf_169" -> "170 Unsqueeze_378" [label="[1]", style=dashed]; +"170 Unsqueeze_378" -> "173 Concat_380" [label="[1]", style=dashed]; +"171 Constant_nncf_171" -> "172 Unsqueeze_379" [label="[1]", style=dashed]; +"172 Unsqueeze_379" -> "173 Concat_380" [label="[1]", 
style=dashed]; +"173 Concat_380" -> "174 Reshape_381" [label="[3]", style=dashed]; +"174 Reshape_381" -> "288 Concat_471" [label="[]", style=solid]; +"175 Conv_382" -> "188 Reshape_392" [label="[1, 24, 13, 13]", style=solid]; +"176 Constant_383" -> "178 Gather_385" [label="[]", style=dashed]; +"177 Shape_384" -> "178 Gather_385" [label="[4]", style=dashed]; +"178 Gather_385" -> "182 Unsqueeze_388" [label="[]", style=dashed]; +"179 Constant_386" -> "184 Unsqueeze_389" [label="[]", style=dashed]; +"180 Constant_387" -> "186 Unsqueeze_390" [label="[]", style=dashed]; +"181 Constant_nncf_181" -> "182 Unsqueeze_388" [label="[1]", style=dashed]; +"182 Unsqueeze_388" -> "187 Concat_391" [label="[1]", style=dashed]; +"183 Constant_nncf_183" -> "184 Unsqueeze_389" [label="[1]", style=dashed]; +"184 Unsqueeze_389" -> "187 Concat_391" [label="[1]", style=dashed]; +"185 Constant_nncf_185" -> "186 Unsqueeze_390" [label="[1]", style=dashed]; +"186 Unsqueeze_390" -> "187 Concat_391" [label="[1]", style=dashed]; +"187 Concat_391" -> "188 Reshape_392" [label="[3]", style=dashed]; +"188 Reshape_392" -> "287 Concat_470" [label="[]", style=solid]; +"189 Conv_393" -> "202 Reshape_403" [label="[1, 486, 13, 13]", style=solid]; +"190 Constant_394" -> "192 Gather_396" [label="[]", style=dashed]; +"191 Shape_395" -> "192 Gather_396" [label="[4]", style=dashed]; +"192 Gather_396" -> "196 Unsqueeze_399" [label="[]", style=dashed]; +"193 Constant_397" -> "198 Unsqueeze_400" [label="[]", style=dashed]; +"194 Constant_398" -> "200 Unsqueeze_401" [label="[]", style=dashed]; +"195 Constant_nncf_195" -> "196 Unsqueeze_399" [label="[1]", style=dashed]; +"196 Unsqueeze_399" -> "201 Concat_402" [label="[1]", style=dashed]; +"197 Constant_nncf_197" -> "198 Unsqueeze_400" [label="[1]", style=dashed]; +"198 Unsqueeze_400" -> "201 Concat_402" [label="[1]", style=dashed]; +"199 Constant_nncf_199" -> "200 Unsqueeze_401" [label="[1]", style=dashed]; +"200 Unsqueeze_401" -> "201 Concat_402" [label="[1]", style=dashed]; +"201 Concat_402" -> "202 Reshape_403" [label="[3]", style=dashed]; +"202 Reshape_403" -> "288 Concat_471" [label="[]", style=solid]; +"203 Conv_404" -> "216 Reshape_414" [label="[1, 24, 7, 7]", style=solid]; +"204 Constant_405" -> "206 Gather_407" [label="[]", style=dashed]; +"205 Shape_406" -> "206 Gather_407" [label="[4]", style=dashed]; +"206 Gather_407" -> "210 Unsqueeze_410" [label="[]", style=dashed]; +"207 Constant_408" -> "212 Unsqueeze_411" [label="[]", style=dashed]; +"208 Constant_409" -> "214 Unsqueeze_412" [label="[]", style=dashed]; +"209 Constant_nncf_209" -> "210 Unsqueeze_410" [label="[1]", style=dashed]; +"210 Unsqueeze_410" -> "215 Concat_413" [label="[1]", style=dashed]; +"211 Constant_nncf_211" -> "212 Unsqueeze_411" [label="[1]", style=dashed]; +"212 Unsqueeze_411" -> "215 Concat_413" [label="[1]", style=dashed]; +"213 Constant_nncf_213" -> "214 Unsqueeze_412" [label="[1]", style=dashed]; +"214 Unsqueeze_412" -> "215 Concat_413" [label="[1]", style=dashed]; +"215 Concat_413" -> "216 Reshape_414" [label="[3]", style=dashed]; +"216 Reshape_414" -> "287 Concat_470" [label="[]", style=solid]; +"217 Conv_415" -> "230 Reshape_425" [label="[1, 486, 7, 7]", style=solid]; +"218 Constant_416" -> "220 Gather_418" [label="[]", style=dashed]; +"219 Shape_417" -> "220 Gather_418" [label="[4]", style=dashed]; +"220 Gather_418" -> "224 Unsqueeze_421" [label="[]", style=dashed]; +"221 Constant_419" -> "226 Unsqueeze_422" [label="[]", style=dashed]; +"222 Constant_420" -> "228 Unsqueeze_423" [label="[]", 
style=dashed]; +"223 Constant_nncf_223" -> "224 Unsqueeze_421" [label="[1]", style=dashed]; +"224 Unsqueeze_421" -> "229 Concat_424" [label="[1]", style=dashed]; +"225 Constant_nncf_225" -> "226 Unsqueeze_422" [label="[1]", style=dashed]; +"226 Unsqueeze_422" -> "229 Concat_424" [label="[1]", style=dashed]; +"227 Constant_nncf_227" -> "228 Unsqueeze_423" [label="[1]", style=dashed]; +"228 Unsqueeze_423" -> "229 Concat_424" [label="[1]", style=dashed]; +"229 Concat_424" -> "230 Reshape_425" [label="[3]", style=dashed]; +"230 Reshape_425" -> "288 Concat_471" [label="[]", style=solid]; +"231 Conv_426" -> "244 Reshape_436" [label="[1, 16, 3, 3]", style=solid]; +"232 Constant_427" -> "234 Gather_429" [label="[]", style=dashed]; +"233 Shape_428" -> "234 Gather_429" [label="[4]", style=dashed]; +"234 Gather_429" -> "238 Unsqueeze_432" [label="[]", style=dashed]; +"235 Constant_430" -> "240 Unsqueeze_433" [label="[]", style=dashed]; +"236 Constant_431" -> "242 Unsqueeze_434" [label="[]", style=dashed]; +"237 Constant_nncf_237" -> "238 Unsqueeze_432" [label="[1]", style=dashed]; +"238 Unsqueeze_432" -> "243 Concat_435" [label="[1]", style=dashed]; +"239 Constant_nncf_239" -> "240 Unsqueeze_433" [label="[1]", style=dashed]; +"240 Unsqueeze_433" -> "243 Concat_435" [label="[1]", style=dashed]; +"241 Constant_nncf_241" -> "242 Unsqueeze_434" [label="[1]", style=dashed]; +"242 Unsqueeze_434" -> "243 Concat_435" [label="[1]", style=dashed]; +"243 Concat_435" -> "244 Reshape_436" [label="[3]", style=dashed]; +"244 Reshape_436" -> "287 Concat_470" [label="[]", style=solid]; +"245 Conv_437" -> "258 Reshape_447" [label="[1, 324, 3, 3]", style=solid]; +"246 Constant_438" -> "248 Gather_440" [label="[]", style=dashed]; +"247 Shape_439" -> "248 Gather_440" [label="[4]", style=dashed]; +"248 Gather_440" -> "252 Unsqueeze_443" [label="[]", style=dashed]; +"249 Constant_441" -> "254 Unsqueeze_444" [label="[]", style=dashed]; +"250 Constant_442" -> "256 Unsqueeze_445" [label="[]", style=dashed]; +"251 Constant_nncf_251" -> "252 Unsqueeze_443" [label="[1]", style=dashed]; +"252 Unsqueeze_443" -> "257 Concat_446" [label="[1]", style=dashed]; +"253 Constant_nncf_253" -> "254 Unsqueeze_444" [label="[1]", style=dashed]; +"254 Unsqueeze_444" -> "257 Concat_446" [label="[1]", style=dashed]; +"255 Constant_nncf_255" -> "256 Unsqueeze_445" [label="[1]", style=dashed]; +"256 Unsqueeze_445" -> "257 Concat_446" [label="[1]", style=dashed]; +"257 Concat_446" -> "258 Reshape_447" [label="[3]", style=dashed]; +"258 Reshape_447" -> "288 Concat_471" [label="[]", style=solid]; +"259 Conv_448" -> "272 Reshape_458" [label="[1, 16, 3, 3]", style=solid]; +"260 Constant_449" -> "262 Gather_451" [label="[]", style=dashed]; +"261 Shape_450" -> "262 Gather_451" [label="[4]", style=dashed]; +"262 Gather_451" -> "266 Unsqueeze_454" [label="[]", style=dashed]; +"263 Constant_452" -> "268 Unsqueeze_455" [label="[]", style=dashed]; +"264 Constant_453" -> "270 Unsqueeze_456" [label="[]", style=dashed]; +"265 Constant_nncf_265" -> "266 Unsqueeze_454" [label="[1]", style=dashed]; +"266 Unsqueeze_454" -> "271 Concat_457" [label="[1]", style=dashed]; +"267 Constant_nncf_267" -> "268 Unsqueeze_455" [label="[1]", style=dashed]; +"268 Unsqueeze_455" -> "271 Concat_457" [label="[1]", style=dashed]; +"269 Constant_nncf_269" -> "270 Unsqueeze_456" [label="[1]", style=dashed]; +"270 Unsqueeze_456" -> "271 Concat_457" [label="[1]", style=dashed]; +"271 Concat_457" -> "272 Reshape_458" [label="[3]", style=dashed]; +"272 Reshape_458" -> "287 Concat_470" 
[label="[]", style=solid]; +"273 Conv_459" -> "286 Reshape_469" [label="[1, 324, 3, 3]", style=solid]; +"274 Constant_460" -> "276 Gather_462" [label="[]", style=dashed]; +"275 Shape_461" -> "276 Gather_462" [label="[4]", style=dashed]; +"276 Gather_462" -> "280 Unsqueeze_465" [label="[]", style=dashed]; +"277 Constant_463" -> "282 Unsqueeze_466" [label="[]", style=dashed]; +"278 Constant_464" -> "284 Unsqueeze_467" [label="[]", style=dashed]; +"279 Constant_nncf_279" -> "280 Unsqueeze_465" [label="[1]", style=dashed]; +"280 Unsqueeze_465" -> "285 Concat_468" [label="[1]", style=dashed]; +"281 Constant_nncf_281" -> "282 Unsqueeze_466" [label="[1]", style=dashed]; +"282 Unsqueeze_466" -> "285 Concat_468" [label="[1]", style=dashed]; +"283 Constant_nncf_283" -> "284 Unsqueeze_467" [label="[1]", style=dashed]; +"284 Unsqueeze_467" -> "285 Concat_468" [label="[1]", style=dashed]; +"285 Concat_468" -> "286 Reshape_469" [label="[3]", style=dashed]; +"286 Reshape_469" -> "288 Concat_471" [label="[]", style=solid]; +"287 Concat_470" -> "289 Transpose_472" [label="[]", style=solid]; +"288 Concat_471" -> "290 Transpose_473" [label="[]", style=solid]; +"289 Transpose_472" -> "295 Slice_478" [label="[]", style=solid]; +"289 Transpose_472" -> "312 Slice_495" [label="[]", style=solid]; +"290 Transpose_473" -> "489 Shape_nncf_489" [label="[]", style=solid]; +"290 Transpose_473" -> "490 Flatten_nncf_490" [label="[]", style=solid]; +"291 Constant_474" -> "295 Slice_478" [label="[1]", style=dashed]; +"292 Constant_475" -> "295 Slice_478" [label="[1]", style=dashed]; +"293 Constant_476" -> "295 Slice_478" [label="[1]", style=dashed]; +"294 Constant_477" -> "295 Slice_478" [label="[1]", style=dashed]; +"295 Slice_478" -> "300 Slice_483" [label="[]", style=solid]; +"296 Constant_479" -> "300 Slice_483" [label="[1]", style=dashed]; +"297 Constant_480" -> "300 Slice_483" [label="[1]", style=dashed]; +"298 Constant_481" -> "300 Slice_483" [label="[1]", style=dashed]; +"299 Constant_482" -> "300 Slice_483" [label="[1]", style=dashed]; +"300 Slice_483" -> "305 Slice_488" [label="[]", style=solid]; +"301 Constant_484" -> "305 Slice_488" [label="[1]", style=dashed]; +"302 Constant_485" -> "305 Slice_488" [label="[1]", style=dashed]; +"303 Constant_486" -> "305 Slice_488" [label="[1]", style=dashed]; +"304 Constant_487" -> "305 Slice_488" [label="[1]", style=dashed]; +"305 Slice_488" -> "307 Mul_490" [label="[]", style=solid]; +"306 Constant_489" -> "307 Mul_490" [label="[]", style=solid]; +"307 Mul_490" -> "326 Mul_509" [label="[]", style=solid]; +"308 Constant_491" -> "312 Slice_495" [label="[1]", style=dashed]; +"309 Constant_492" -> "312 Slice_495" [label="[1]", style=dashed]; +"310 Constant_493" -> "312 Slice_495" [label="[1]", style=dashed]; +"311 Constant_494" -> "312 Slice_495" [label="[1]", style=dashed]; +"312 Slice_495" -> "317 Slice_500" [label="[]", style=solid]; +"313 Constant_496" -> "317 Slice_500" [label="[1]", style=dashed]; +"314 Constant_497" -> "317 Slice_500" [label="[1]", style=dashed]; +"315 Constant_498" -> "317 Slice_500" [label="[1]", style=dashed]; +"316 Constant_499" -> "317 Slice_500" [label="[1]", style=dashed]; +"317 Slice_500" -> "322 Slice_505" [label="[]", style=solid]; +"318 Constant_501" -> "322 Slice_505" [label="[1]", style=dashed]; +"319 Constant_502" -> "322 Slice_505" [label="[1]", style=dashed]; +"320 Constant_503" -> "322 Slice_505" [label="[1]", style=dashed]; +"321 Constant_504" -> "322 Slice_505" [label="[1]", style=dashed]; +"322 Slice_505" -> "324 Mul_507" [label="[]", 
style=solid]; +"323 Constant_506" -> "324 Mul_507" [label="[]", style=solid]; +"324 Mul_507" -> "329 Exp_512" [label="[]", style=solid]; +"325 Constant_508" -> "326 Mul_509" [label="[1, 15130, 2]", style=solid]; +"326 Mul_509" -> "328 Add_511" [label="[]", style=solid]; +"327 Constant_510" -> "328 Add_511" [label="[1, 15130, 2]", style=solid]; +"328 Add_511" -> "336 Slice_519" [label="[]", style=solid]; +"328 Add_511" -> "373 Slice_554" [label="[]", style=solid]; +"328 Add_511" -> "410 Slice_589" [label="[]", style=solid]; +"328 Add_511" -> "447 Slice_624" [label="[]", style=solid]; +"329 Exp_512" -> "331 Mul_514" [label="[]", style=solid]; +"330 Constant_513" -> "331 Mul_514" [label="[1, 15130, 2]", style=solid]; +"331 Mul_514" -> "353 Slice_535" [label="[]", style=solid]; +"331 Mul_514" -> "390 Slice_570" [label="[]", style=solid]; +"331 Mul_514" -> "427 Slice_605" [label="[]", style=solid]; +"331 Mul_514" -> "464 Slice_640" [label="[]", style=solid]; +"332 Constant_515" -> "336 Slice_519" [label="[1]", style=dashed]; +"333 Constant_516" -> "336 Slice_519" [label="[1]", style=dashed]; +"334 Constant_517" -> "336 Slice_519" [label="[1]", style=dashed]; +"335 Constant_518" -> "336 Slice_519" [label="[1]", style=dashed]; +"336 Slice_519" -> "341 Slice_524" [label="[]", style=solid]; +"337 Constant_520" -> "341 Slice_524" [label="[1]", style=dashed]; +"338 Constant_521" -> "341 Slice_524" [label="[1]", style=dashed]; +"339 Constant_522" -> "341 Slice_524" [label="[1]", style=dashed]; +"340 Constant_523" -> "341 Slice_524" [label="[1]", style=dashed]; +"341 Slice_524" -> "346 Slice_529" [label="[]", style=solid]; +"342 Constant_525" -> "346 Slice_529" [label="[1]", style=dashed]; +"343 Constant_526" -> "346 Slice_529" [label="[1]", style=dashed]; +"344 Constant_527" -> "346 Slice_529" [label="[1]", style=dashed]; +"345 Constant_528" -> "346 Slice_529" [label="[1]", style=dashed]; +"346 Slice_529" -> "348 Squeeze_530" [label="[]", style=solid]; +"347 Constant_nncf_347" -> "348 Squeeze_530" [label="[1]", style=dashed]; +"348 Squeeze_530" -> "368 Sub_549" [label="[]", style=solid]; +"349 Constant_531" -> "353 Slice_535" [label="[1]", style=dashed]; +"350 Constant_532" -> "353 Slice_535" [label="[1]", style=dashed]; +"351 Constant_533" -> "353 Slice_535" [label="[1]", style=dashed]; +"352 Constant_534" -> "353 Slice_535" [label="[1]", style=dashed]; +"353 Slice_535" -> "358 Slice_540" [label="[]", style=solid]; +"354 Constant_536" -> "358 Slice_540" [label="[1]", style=dashed]; +"355 Constant_537" -> "358 Slice_540" [label="[1]", style=dashed]; +"356 Constant_538" -> "358 Slice_540" [label="[1]", style=dashed]; +"357 Constant_539" -> "358 Slice_540" [label="[1]", style=dashed]; +"358 Slice_540" -> "363 Slice_545" [label="[]", style=solid]; +"359 Constant_541" -> "363 Slice_545" [label="[1]", style=dashed]; +"360 Constant_542" -> "363 Slice_545" [label="[1]", style=dashed]; +"361 Constant_543" -> "363 Slice_545" [label="[1]", style=dashed]; +"362 Constant_544" -> "363 Slice_545" [label="[1]", style=dashed]; +"363 Slice_545" -> "365 Squeeze_546" [label="[]", style=solid]; +"364 Constant_nncf_364" -> "365 Squeeze_546" [label="[1]", style=dashed]; +"365 Squeeze_546" -> "367 Mul_548" [label="[]", style=solid]; +"366 Constant_547" -> "367 Mul_548" [label="[]", style=solid]; +"367 Mul_548" -> "368 Sub_549" [label="[]", style=solid]; +"368 Sub_549" -> "481 Unsqueeze_655" [label="[]", style=solid]; +"369 Constant_550" -> "373 Slice_554" [label="[1]", style=dashed]; +"370 Constant_551" -> "373 Slice_554" 
[label="[1]", style=dashed]; +"371 Constant_552" -> "373 Slice_554" [label="[1]", style=dashed]; +"372 Constant_553" -> "373 Slice_554" [label="[1]", style=dashed]; +"373 Slice_554" -> "378 Slice_559" [label="[]", style=solid]; +"374 Constant_555" -> "378 Slice_559" [label="[1]", style=dashed]; +"375 Constant_556" -> "378 Slice_559" [label="[1]", style=dashed]; +"376 Constant_557" -> "378 Slice_559" [label="[1]", style=dashed]; +"377 Constant_558" -> "378 Slice_559" [label="[1]", style=dashed]; +"378 Slice_559" -> "383 Slice_564" [label="[]", style=solid]; +"379 Constant_560" -> "383 Slice_564" [label="[1]", style=dashed]; +"380 Constant_561" -> "383 Slice_564" [label="[1]", style=dashed]; +"381 Constant_562" -> "383 Slice_564" [label="[1]", style=dashed]; +"382 Constant_563" -> "383 Slice_564" [label="[1]", style=dashed]; +"383 Slice_564" -> "385 Squeeze_565" [label="[]", style=solid]; +"384 Constant_nncf_384" -> "385 Squeeze_565" [label="[1]", style=dashed]; +"385 Squeeze_565" -> "405 Sub_584" [label="[]", style=solid]; +"386 Constant_566" -> "390 Slice_570" [label="[1]", style=dashed]; +"387 Constant_567" -> "390 Slice_570" [label="[1]", style=dashed]; +"388 Constant_568" -> "390 Slice_570" [label="[1]", style=dashed]; +"389 Constant_569" -> "390 Slice_570" [label="[1]", style=dashed]; +"390 Slice_570" -> "395 Slice_575" [label="[]", style=solid]; +"391 Constant_571" -> "395 Slice_575" [label="[1]", style=dashed]; +"392 Constant_572" -> "395 Slice_575" [label="[1]", style=dashed]; +"393 Constant_573" -> "395 Slice_575" [label="[1]", style=dashed]; +"394 Constant_574" -> "395 Slice_575" [label="[1]", style=dashed]; +"395 Slice_575" -> "400 Slice_580" [label="[]", style=solid]; +"396 Constant_576" -> "400 Slice_580" [label="[1]", style=dashed]; +"397 Constant_577" -> "400 Slice_580" [label="[1]", style=dashed]; +"398 Constant_578" -> "400 Slice_580" [label="[1]", style=dashed]; +"399 Constant_579" -> "400 Slice_580" [label="[1]", style=dashed]; +"400 Slice_580" -> "402 Squeeze_581" [label="[]", style=solid]; +"401 Constant_nncf_401" -> "402 Squeeze_581" [label="[1]", style=dashed]; +"402 Squeeze_581" -> "404 Mul_583" [label="[]", style=solid]; +"403 Constant_582" -> "404 Mul_583" [label="[]", style=solid]; +"404 Mul_583" -> "405 Sub_584" [label="[]", style=solid]; +"405 Sub_584" -> "483 Unsqueeze_656" [label="[]", style=solid]; +"406 Constant_585" -> "410 Slice_589" [label="[1]", style=dashed]; +"407 Constant_586" -> "410 Slice_589" [label="[1]", style=dashed]; +"408 Constant_587" -> "410 Slice_589" [label="[1]", style=dashed]; +"409 Constant_588" -> "410 Slice_589" [label="[1]", style=dashed]; +"410 Slice_589" -> "415 Slice_594" [label="[]", style=solid]; +"411 Constant_590" -> "415 Slice_594" [label="[1]", style=dashed]; +"412 Constant_591" -> "415 Slice_594" [label="[1]", style=dashed]; +"413 Constant_592" -> "415 Slice_594" [label="[1]", style=dashed]; +"414 Constant_593" -> "415 Slice_594" [label="[1]", style=dashed]; +"415 Slice_594" -> "420 Slice_599" [label="[]", style=solid]; +"416 Constant_595" -> "420 Slice_599" [label="[1]", style=dashed]; +"417 Constant_596" -> "420 Slice_599" [label="[1]", style=dashed]; +"418 Constant_597" -> "420 Slice_599" [label="[1]", style=dashed]; +"419 Constant_598" -> "420 Slice_599" [label="[1]", style=dashed]; +"420 Slice_599" -> "422 Squeeze_600" [label="[]", style=solid]; +"421 Constant_nncf_421" -> "422 Squeeze_600" [label="[1]", style=dashed]; +"422 Squeeze_600" -> "442 Add_619" [label="[]", style=solid]; +"423 Constant_601" -> "427 Slice_605" 
[label="[1]", style=dashed]; +"424 Constant_602" -> "427 Slice_605" [label="[1]", style=dashed]; +"425 Constant_603" -> "427 Slice_605" [label="[1]", style=dashed]; +"426 Constant_604" -> "427 Slice_605" [label="[1]", style=dashed]; +"427 Slice_605" -> "432 Slice_610" [label="[]", style=solid]; +"428 Constant_606" -> "432 Slice_610" [label="[1]", style=dashed]; +"429 Constant_607" -> "432 Slice_610" [label="[1]", style=dashed]; +"430 Constant_608" -> "432 Slice_610" [label="[1]", style=dashed]; +"431 Constant_609" -> "432 Slice_610" [label="[1]", style=dashed]; +"432 Slice_610" -> "437 Slice_615" [label="[]", style=solid]; +"433 Constant_611" -> "437 Slice_615" [label="[1]", style=dashed]; +"434 Constant_612" -> "437 Slice_615" [label="[1]", style=dashed]; +"435 Constant_613" -> "437 Slice_615" [label="[1]", style=dashed]; +"436 Constant_614" -> "437 Slice_615" [label="[1]", style=dashed]; +"437 Slice_615" -> "439 Squeeze_616" [label="[]", style=solid]; +"438 Constant_nncf_438" -> "439 Squeeze_616" [label="[1]", style=dashed]; +"439 Squeeze_616" -> "441 Mul_618" [label="[]", style=solid]; +"440 Constant_617" -> "441 Mul_618" [label="[]", style=solid]; +"441 Mul_618" -> "442 Add_619" [label="[]", style=solid]; +"442 Add_619" -> "485 Unsqueeze_657" [label="[]", style=solid]; +"443 Constant_620" -> "447 Slice_624" [label="[1]", style=dashed]; +"444 Constant_621" -> "447 Slice_624" [label="[1]", style=dashed]; +"445 Constant_622" -> "447 Slice_624" [label="[1]", style=dashed]; +"446 Constant_623" -> "447 Slice_624" [label="[1]", style=dashed]; +"447 Slice_624" -> "452 Slice_629" [label="[]", style=solid]; +"448 Constant_625" -> "452 Slice_629" [label="[1]", style=dashed]; +"449 Constant_626" -> "452 Slice_629" [label="[1]", style=dashed]; +"450 Constant_627" -> "452 Slice_629" [label="[1]", style=dashed]; +"451 Constant_628" -> "452 Slice_629" [label="[1]", style=dashed]; +"452 Slice_629" -> "457 Slice_634" [label="[]", style=solid]; +"453 Constant_630" -> "457 Slice_634" [label="[1]", style=dashed]; +"454 Constant_631" -> "457 Slice_634" [label="[1]", style=dashed]; +"455 Constant_632" -> "457 Slice_634" [label="[1]", style=dashed]; +"456 Constant_633" -> "457 Slice_634" [label="[1]", style=dashed]; +"457 Slice_634" -> "459 Squeeze_635" [label="[]", style=solid]; +"458 Constant_nncf_458" -> "459 Squeeze_635" [label="[1]", style=dashed]; +"459 Squeeze_635" -> "479 Add_654" [label="[]", style=solid]; +"460 Constant_636" -> "464 Slice_640" [label="[1]", style=dashed]; +"461 Constant_637" -> "464 Slice_640" [label="[1]", style=dashed]; +"462 Constant_638" -> "464 Slice_640" [label="[1]", style=dashed]; +"463 Constant_639" -> "464 Slice_640" [label="[1]", style=dashed]; +"464 Slice_640" -> "469 Slice_645" [label="[]", style=solid]; +"465 Constant_641" -> "469 Slice_645" [label="[1]", style=dashed]; +"466 Constant_642" -> "469 Slice_645" [label="[1]", style=dashed]; +"467 Constant_643" -> "469 Slice_645" [label="[1]", style=dashed]; +"468 Constant_644" -> "469 Slice_645" [label="[1]", style=dashed]; +"469 Slice_645" -> "474 Slice_650" [label="[]", style=solid]; +"470 Constant_646" -> "474 Slice_650" [label="[1]", style=dashed]; +"471 Constant_647" -> "474 Slice_650" [label="[1]", style=dashed]; +"472 Constant_648" -> "474 Slice_650" [label="[1]", style=dashed]; +"473 Constant_649" -> "474 Slice_650" [label="[1]", style=dashed]; +"474 Slice_650" -> "476 Squeeze_651" [label="[]", style=solid]; +"475 Constant_nncf_475" -> "476 Squeeze_651" [label="[1]", style=dashed]; +"476 Squeeze_651" -> "478 
Mul_653" [label="[]", style=solid]; +"477 Constant_652" -> "478 Mul_653" [label="[]", style=solid]; +"478 Mul_653" -> "479 Add_654" [label="[]", style=solid]; +"479 Add_654" -> "487 Unsqueeze_658" [label="[]", style=solid]; +"480 Constant_nncf_480" -> "481 Unsqueeze_655" [label="[1]", style=dashed]; +"481 Unsqueeze_655" -> "488 Concat_659" [label="[]", style=solid]; +"482 Constant_nncf_482" -> "483 Unsqueeze_656" [label="[1]", style=dashed]; +"483 Unsqueeze_656" -> "488 Concat_659" [label="[]", style=solid]; +"484 Constant_nncf_484" -> "485 Unsqueeze_657" [label="[1]", style=dashed]; +"485 Unsqueeze_657" -> "488 Concat_659" [label="[]", style=solid]; +"486 Constant_nncf_486" -> "487 Unsqueeze_658" [label="[1]", style=dashed]; +"487 Unsqueeze_658" -> "488 Concat_659" [label="[]", style=solid]; +"488 Concat_659" -> "515 NonMaxSuppression_683" [label="[]", style=solid]; +"488 Concat_659" -> "553 Squeeze_719" [label="[]", style=solid]; +"489 Shape_nncf_489" -> "492 Reshape_nncf_492" [label="[-1]", style=dashed]; +"490 Flatten_nncf_490" -> "491 Softmax_660" [label="[]", style=solid]; +"491 Softmax_660" -> "492 Reshape_nncf_492" [label="[]", style=solid]; +"492 Reshape_nncf_492" -> "493 Transpose_661" [label="[]", style=solid]; +"493 Transpose_661" -> "498 Slice_666" [label="[]", style=solid]; +"494 Constant_662" -> "498 Slice_666" [label="[1]", style=dashed]; +"495 Constant_663" -> "498 Slice_666" [label="[1]", style=dashed]; +"496 Constant_664" -> "498 Slice_666" [label="[1]", style=dashed]; +"497 Constant_665" -> "498 Slice_666" [label="[1]", style=dashed]; +"498 Slice_666" -> "503 Slice_671" [label="[]", style=solid]; +"499 Constant_667" -> "503 Slice_671" [label="[1]", style=dashed]; +"500 Constant_668" -> "503 Slice_671" [label="[1]", style=dashed]; +"501 Constant_669" -> "503 Slice_671" [label="[1]", style=dashed]; +"502 Constant_670" -> "503 Slice_671" [label="[1]", style=dashed]; +"503 Slice_671" -> "508 Slice_676" [label="[]", style=solid]; +"504 Constant_672" -> "508 Slice_676" [label="[1]", style=dashed]; +"505 Constant_673" -> "508 Slice_676" [label="[1]", style=dashed]; +"506 Constant_674" -> "508 Slice_676" [label="[1]", style=dashed]; +"507 Constant_675" -> "508 Slice_676" [label="[1]", style=dashed]; +"508 Slice_676" -> "515 NonMaxSuppression_683" [label="[]", style=solid]; +"508 Slice_676" -> "531 Reshape_699" [label="[]", style=solid]; +"508 Slice_676" -> "532 Shape_700" [label="[]", style=solid]; +"509 Constant_677" -> "510 ConstantOfShape_678" [label="[1]", style=dashed]; +"510 ConstantOfShape_678" -> "515 NonMaxSuppression_683" [label="[1]", style=dashed]; +"511 Constant_679" -> "512 ConstantOfShape_680" [label="[1]", style=dashed]; +"512 ConstantOfShape_680" -> "515 NonMaxSuppression_683" [label="[1]", style=solid]; +"513 Constant_681" -> "514 ConstantOfShape_682" [label="[1]", style=dashed]; +"514 ConstantOfShape_682" -> "515 NonMaxSuppression_683" [label="[1]", style=solid]; +"515 NonMaxSuppression_683" -> "520 Slice_688" [label="[-1, 3]", style=dashed]; +"515 NonMaxSuppression_683" -> "527 Slice_695" [label="[-1, 3]", style=dashed]; +"516 Constant_684" -> "520 Slice_688" [label="[1]", style=dashed]; +"517 Constant_685" -> "520 Slice_688" [label="[1]", style=dashed]; +"518 Constant_686" -> "520 Slice_688" [label="[1]", style=dashed]; +"519 Constant_687" -> "520 Slice_688" [label="[1]", style=dashed]; +"520 Slice_688" -> "522 Gather_690" [label="[-1, 3]", style=dashed]; +"521 Constant_689" -> "522 Gather_690" [label="[]", style=dashed]; +"522 Gather_690" -> "535 Mul_703" 
[label="[-1]", style=dashed]; +"522 Gather_690" -> "564 Gather_729" [label="[-1]", style=dashed]; +"523 Constant_691" -> "527 Slice_695" [label="[1]", style=dashed]; +"524 Constant_692" -> "527 Slice_695" [label="[1]", style=dashed]; +"525 Constant_693" -> "527 Slice_695" [label="[1]", style=dashed]; +"526 Constant_694" -> "527 Slice_695" [label="[1]", style=dashed]; +"527 Slice_695" -> "529 Gather_697" [label="[-1, 3]", style=dashed]; +"528 Constant_696" -> "529 Gather_697" [label="[]", style=dashed]; +"529 Gather_697" -> "536 Add_704" [label="[-1]", style=dashed]; +"529 Gather_697" -> "554 Gather_720" [label="[-1]", style=dashed]; +"530 Constant_698" -> "531 Reshape_699" [label="[1]", style=dashed]; +"531 Reshape_699" -> "538 Gather_706" [label="[-1]", style=solid]; +"532 Shape_700" -> "534 Gather_702" [label="[-1]", style=dashed]; +"533 Constant_701" -> "534 Gather_702" [label="[]", style=dashed]; +"534 Gather_702" -> "535 Mul_703" [label="[]", style=dashed]; +"535 Mul_703" -> "536 Add_704" [label="[-1]", style=dashed]; +"536 Add_704" -> "537 Cast_705" [label="[-1]", style=dashed]; +"537 Cast_705" -> "538 Gather_706" [label="[-1]", style=dashed]; +"538 Gather_706" -> "539 Shape_707" [label="[-1]", style=solid]; +"538 Gather_706" -> "551 TopK_717" [label="[-1]", style=solid]; +"538 Gather_706" -> "569 Gather_733" [label="[-1]", style=solid]; +"539 Shape_707" -> "541 Gather_709" [label="[1]", style=dashed]; +"540 Constant_708" -> "541 Gather_709" [label="[]", style=dashed]; +"541 Gather_709" -> "543 Unsqueeze_710" [label="[]", style=dashed]; +"542 Constant_nncf_542" -> "543 Unsqueeze_710" [label="[1]", style=dashed]; +"543 Unsqueeze_710" -> "545 Concat_712" [label="[1]", style=dashed]; +"544 Constant_711" -> "545 Concat_712" [label="[1]", style=dashed]; +"545 Concat_712" -> "546 Cast_713" [label="[2]", style=dashed]; +"546 Cast_713" -> "547 ReduceMin_714" [label="[2]", style=dashed]; +"547 ReduceMin_714" -> "548 Cast_715" [label="[]", style=dashed]; +"548 Cast_715" -> "550 Unsqueeze_716" [label="[]", style=dashed]; +"549 Constant_nncf_549" -> "550 Unsqueeze_716" [label="[1]", style=dashed]; +"550 Unsqueeze_716" -> "551 TopK_717" [label="[1]", style=dashed]; +"551 TopK_717" -> "554 Gather_720" [label="[-1]", style=dashed]; +"551 TopK_717" -> "564 Gather_729" [label="[-1]", style=dashed]; +"551 TopK_717" -> "569 Gather_733" [label="[-1]", style=dashed]; +"552 Constant_nncf_552" -> "553 Squeeze_719" [label="[1]", style=dashed]; +"553 Squeeze_719" -> "559 Slice_725" [label="[]", style=solid]; +"554 Gather_720" -> "560 Cast_726" [label="[-1]", style=dashed]; +"555 Constant_721" -> "559 Slice_725" [label="[1]", style=dashed]; +"556 Constant_722" -> "559 Slice_725" [label="[1]", style=dashed]; +"557 Constant_723" -> "559 Slice_725" [label="[1]", style=dashed]; +"558 Constant_724" -> "559 Slice_725" [label="[1]", style=dashed]; +"559 Slice_725" -> "561 Gather_727" [label="[]", style=solid]; +"560 Cast_726" -> "561 Gather_727" [label="[-1]", style=dashed]; +"561 Gather_727" -> "563 Unsqueeze_bboxes" [label="[]", style=solid]; +"562 Constant_nncf_562" -> "563 Unsqueeze_bboxes" [label="[1]", style=dashed]; +"563 Unsqueeze_bboxes" -> "573 nncf_model_output_0" [label="[1, -1, 4]", style=solid]; +"564 Gather_729" -> "566 Unsqueeze_730" [label="[-1]", style=dashed]; +"565 Constant_nncf_565" -> "566 Unsqueeze_730" [label="[1]", style=dashed]; +"566 Unsqueeze_730" -> "568 Add_labels" [label="[1, -1]", style=dashed]; +"567 Constant_731" -> "568 Add_labels" [label="[]", style=dashed]; +"568 Add_labels" -> 
"574 nncf_model_output_1" [label="[1, -1]", style=dashed]; +"569 Gather_733" -> "571 Unsqueeze_scores" [label="[-1]", style=solid]; +"570 Constant_nncf_570" -> "571 Unsqueeze_scores" [label="[1]", style=dashed]; +"571 Unsqueeze_scores" -> "575 nncf_model_output_2" [label="[1, -1]", style=solid]; +"572 nncf_model_input_0" -> "0 Conv_219" [label="[1, 3, 1200, 1200]", style=solid]; } diff --git a/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/double_input_output_model.dot b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/double_input_output_model.dot index cec9e2bd598..c2b1e7098c4 100644 --- a/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/double_input_output_model.dot +++ b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/double_input_output_model.dot @@ -5,7 +5,7 @@ strict digraph { "3 nncf_model_input_1" [id=3, type=nncf_model_input]; "4 nncf_model_output_0" [id=4, type=nncf_model_output]; "5 nncf_model_output_1" [id=5, type=nncf_model_output]; -"0 Add2" -> "4 nncf_model_output_0" [label="[2, 6, 3, 3", style=solid]; +"0 Add2" -> "4 nncf_model_output_0" [label="[2, 6, 3, 3]", style=solid]; "1 Add1" -> "5 nncf_model_output_1" [label="[2, 6, 3, 3]", style=solid]; "2 nncf_model_input_0" -> "0 Add2" [label="[1, 6, 3, 3]", style=solid]; "2 nncf_model_input_0" -> "1 Add1" [label="[1, 6, 3, 3]", style=solid]; diff --git a/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/embedding_model.dot b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/embedding_model.dot index 5cf14d30c4f..0b70a6d3064 100644 --- a/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/embedding_model.dot +++ b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/embedding_model.dot @@ -8,6 +8,6 @@ strict digraph { "0 Identity" -> "1 Embedding" [label="[10, 20]", style=solid]; "1 Embedding" -> "2 Gather" [label="[1, 10, 20]", style=solid]; "2 Gather" -> "3 MatMul" [label="[10, 20]", style=solid]; -"3 MatMul" -> "5 nncf_model_output_0" [label="[1, 10]", style=solid]; +"3 MatMul" -> "5 nncf_model_output_0" [label="[10, 10]", style=solid]; "4 nncf_model_input_0" -> "1 Embedding" [label="[1, 10]", style=dashed]; } diff --git a/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/shape_of_model.dot b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/shape_of_model.dot index 781014be0d9..6120b51edc9 100644 --- a/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/shape_of_model.dot +++ b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/shape_of_model.dot @@ -18,7 +18,7 @@ strict digraph { "4 Cast1" -> "5 Cast2" [label="[]", style=dashed]; "5 Cast2" -> "6 Sqrt" [label="[]", style=solid]; "6 Sqrt" -> "7 Reshape" [label="[]", style=solid]; -"7 Reshape" -> "8 Conv2" [label="[1]", style=solid]; +"7 Reshape" -> "8 Conv2" [label="[]", style=solid]; "8 Conv2" -> "10 nncf_model_output_0" [label="[1, 10, 1, 1]", style=solid]; "9 nncf_model_input_0" -> "0 Conv1" [label="[1, 3, 32, 32]", style=solid]; } diff --git a/tests/onnx/data/reference_graphs/quantization/bertsquad-12.dot b/tests/onnx/data/reference_graphs/quantization/bertsquad-12.dot index 5e2502ba0c4..b7b0c044565 100644 --- a/tests/onnx/data/reference_graphs/quantization/bertsquad-12.dot +++ b/tests/onnx/data/reference_graphs/quantization/bertsquad-12.dot @@ -1,3278 +1,3758 @@ strict digraph { "0 unique_ids_graph_outputs_Identity__10" [id=0, type=Identity]; -"1 bert/encoder/ones/packed_Unsqueeze__20" [id=1, type=Unsqueeze]; -"2 
bert/encoder/ones/packed_Unsqueeze__19" [id=2, type=Unsqueeze]; -"3 bert/encoder/layer_9/attention/self/Reshape_3/shape_Unsqueeze__83" [id=3, type=Unsqueeze]; -"4 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__88" [id=4, type=Unsqueeze]; -"5 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__87" [id=5, type=Unsqueeze]; -"6 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__86" [id=6, type=Unsqueeze]; -"7 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__93" [id=7, type=Unsqueeze]; -"8 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__92" [id=8, type=Unsqueeze]; -"9 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__91" [id=9, type=Unsqueeze]; -"10 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__98" [id=10, type=Unsqueeze]; -"11 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__97" [id=11, type=Unsqueeze]; -"12 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__96" [id=12, type=Unsqueeze]; -"13 bert/encoder/layer_8/attention/self/Reshape_3/shape_Unsqueeze__101" [id=13, type=Unsqueeze]; -"14 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__106" [id=14, type=Unsqueeze]; -"15 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__105" [id=15, type=Unsqueeze]; -"16 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__104" [id=16, type=Unsqueeze]; -"17 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__111" [id=17, type=Unsqueeze]; -"18 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__110" [id=18, type=Unsqueeze]; -"19 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__109" [id=19, type=Unsqueeze]; -"20 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__116" [id=20, type=Unsqueeze]; -"21 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__115" [id=21, type=Unsqueeze]; -"22 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__114" [id=22, type=Unsqueeze]; -"23 bert/encoder/layer_7/attention/self/Reshape_3/shape_Unsqueeze__119" [id=23, type=Unsqueeze]; -"24 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__124" [id=24, type=Unsqueeze]; -"25 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__123" [id=25, type=Unsqueeze]; -"26 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__122" [id=26, type=Unsqueeze]; -"27 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__129" [id=27, type=Unsqueeze]; -"28 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__128" [id=28, type=Unsqueeze]; -"29 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__127" [id=29, type=Unsqueeze]; -"30 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__134" [id=30, type=Unsqueeze]; -"31 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__133" [id=31, type=Unsqueeze]; -"32 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__132" [id=32, type=Unsqueeze]; -"33 bert/encoder/layer_6/attention/self/Reshape_3/shape_Unsqueeze__137" [id=33, type=Unsqueeze]; -"34 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__142" [id=34, type=Unsqueeze]; -"35 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__141" [id=35, type=Unsqueeze]; -"36 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__140" [id=36, type=Unsqueeze]; -"37 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__147" [id=37, type=Unsqueeze]; -"38 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__146" [id=38, type=Unsqueeze]; -"39 
bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__145" [id=39, type=Unsqueeze]; -"40 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__152" [id=40, type=Unsqueeze]; -"41 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__151" [id=41, type=Unsqueeze]; -"42 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__150" [id=42, type=Unsqueeze]; -"43 bert/encoder/layer_5/attention/self/Reshape_3/shape_Unsqueeze__155" [id=43, type=Unsqueeze]; -"44 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__160" [id=44, type=Unsqueeze]; -"45 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__159" [id=45, type=Unsqueeze]; -"46 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__158" [id=46, type=Unsqueeze]; -"47 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__165" [id=47, type=Unsqueeze]; -"48 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__164" [id=48, type=Unsqueeze]; -"49 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__163" [id=49, type=Unsqueeze]; -"50 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__170" [id=50, type=Unsqueeze]; -"51 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__169" [id=51, type=Unsqueeze]; -"52 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__168" [id=52, type=Unsqueeze]; -"53 bert/encoder/layer_4/attention/self/Reshape_3/shape_Unsqueeze__173" [id=53, type=Unsqueeze]; -"54 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__178" [id=54, type=Unsqueeze]; -"55 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__177" [id=55, type=Unsqueeze]; -"56 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__176" [id=56, type=Unsqueeze]; -"57 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__183" [id=57, type=Unsqueeze]; -"58 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__182" [id=58, type=Unsqueeze]; -"59 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__181" [id=59, type=Unsqueeze]; -"60 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__188" [id=60, type=Unsqueeze]; -"61 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__187" [id=61, type=Unsqueeze]; -"62 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__186" [id=62, type=Unsqueeze]; -"63 bert/encoder/layer_3/attention/self/Reshape_3/shape_Unsqueeze__191" [id=63, type=Unsqueeze]; -"64 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__196" [id=64, type=Unsqueeze]; -"65 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__195" [id=65, type=Unsqueeze]; -"66 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__194" [id=66, type=Unsqueeze]; -"67 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__201" [id=67, type=Unsqueeze]; -"68 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__200" [id=68, type=Unsqueeze]; -"69 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__199" [id=69, type=Unsqueeze]; -"70 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__206" [id=70, type=Unsqueeze]; -"71 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__205" [id=71, type=Unsqueeze]; -"72 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__204" [id=72, type=Unsqueeze]; -"73 bert/encoder/layer_2/attention/self/Reshape_3/shape_Unsqueeze__209" [id=73, type=Unsqueeze]; -"74 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__214" [id=74, type=Unsqueeze]; -"75 
bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__213" [id=75, type=Unsqueeze]; -"76 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__212" [id=76, type=Unsqueeze]; -"77 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__219" [id=77, type=Unsqueeze]; -"78 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__218" [id=78, type=Unsqueeze]; -"79 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__217" [id=79, type=Unsqueeze]; -"80 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__224" [id=80, type=Unsqueeze]; -"81 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__223" [id=81, type=Unsqueeze]; -"82 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__222" [id=82, type=Unsqueeze]; -"83 bert/encoder/layer_11/attention/self/Reshape_3/shape_Unsqueeze__227" [id=83, type=Unsqueeze]; -"84 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__232" [id=84, type=Unsqueeze]; -"85 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__231" [id=85, type=Unsqueeze]; -"86 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__230" [id=86, type=Unsqueeze]; -"87 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__237" [id=87, type=Unsqueeze]; -"88 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__236" [id=88, type=Unsqueeze]; -"89 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__235" [id=89, type=Unsqueeze]; -"90 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__242" [id=90, type=Unsqueeze]; -"91 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__241" [id=91, type=Unsqueeze]; -"92 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__240" [id=92, type=Unsqueeze]; -"93 bert/encoder/layer_10/attention/self/Reshape_3/shape_Unsqueeze__245" [id=93, type=Unsqueeze]; -"94 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__250" [id=94, type=Unsqueeze]; -"95 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__249" [id=95, type=Unsqueeze]; -"96 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__248" [id=96, type=Unsqueeze]; -"97 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__255" [id=97, type=Unsqueeze]; -"98 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__254" [id=98, type=Unsqueeze]; -"99 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__253" [id=99, type=Unsqueeze]; -"100 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__260" [id=100, type=Unsqueeze]; -"101 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__259" [id=101, type=Unsqueeze]; -"102 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__258" [id=102, type=Unsqueeze]; -"103 bert/encoder/layer_1/attention/self/Reshape_3/shape_Unsqueeze__263" [id=103, type=Unsqueeze]; -"104 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__268" [id=104, type=Unsqueeze]; -"105 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__267" [id=105, type=Unsqueeze]; -"106 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__266" [id=106, type=Unsqueeze]; -"107 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__273" [id=107, type=Unsqueeze]; -"108 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__272" [id=108, type=Unsqueeze]; -"109 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__271" [id=109, type=Unsqueeze]; -"110 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__278" [id=110, type=Unsqueeze]; -"111 
bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__277" [id=111, type=Unsqueeze]; -"112 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__276" [id=112, type=Unsqueeze]; -"113 bert/encoder/layer_0/attention/self/Reshape_3/shape_Unsqueeze__281" [id=113, type=Unsqueeze]; -"114 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__286" [id=114, type=Unsqueeze]; -"115 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__285" [id=115, type=Unsqueeze]; -"116 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__284" [id=116, type=Unsqueeze]; -"117 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__291" [id=117, type=Unsqueeze]; -"118 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__290" [id=118, type=Unsqueeze]; -"119 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__289" [id=119, type=Unsqueeze]; -"120 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__296" [id=120, type=Unsqueeze]; -"121 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__295" [id=121, type=Unsqueeze]; -"122 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__294" [id=122, type=Unsqueeze]; -"123 bert/encoder/Shape" [id=123, type=Shape]; -"124 bert/encoder/Shape__12" [id=124, type=Cast]; -"125 bert/encoder/strided_slice" [id=125, type=Slice]; -"126 bert/encoder/strided_slice__16" [id=126, type=Squeeze]; -"127 bert/encoder/strided_slice__17" [id=127, type=Cast]; -"128 bert/encoder/ones/packed_Unsqueeze__18" [id=128, type=Unsqueeze]; -"129 bert/encoder/ones/packed_Concat__21" [id=129, type=Concat]; -"130 bert/encoder/ones__22" [id=130, type=Cast]; -"131 bert/encoder/ones" [id=131, type=ConstantOfShape]; -"132 bert/encoder/Reshape_13/shape_Unsqueeze__300" [id=132, type=Unsqueeze]; -"133 bert/encoder/Reshape_13/shape_Unsqueeze__299" [id=133, type=Unsqueeze]; -"134 bert/encoder/Reshape_1__302" [id=134, type=Cast]; -"135 bert/encoder/Reshape/shape_Unsqueeze__23" [id=135, type=Unsqueeze]; -"136 bert/encoder/Reshape/shape_Unsqueeze__25" [id=136, type=Unsqueeze]; -"137 bert/encoder/Reshape/shape_Unsqueeze__24" [id=137, type=Unsqueeze]; -"138 bert/encoder/Reshape/shape_Concat__26" [id=138, type=Concat]; -"139 bert/encoder/Reshape__27" [id=139, type=Cast]; -"140 bert/encoder/Reshape" [id=140, type=Reshape]; -"141 bert/encoder/Cast" [id=141, type=Cast]; -"142 bert/encoder/mul" [id=142, type=Mul]; -"143 bert/encoder/layer_9/attention/self/ExpandDims" [id=143, type=Reshape]; -"144 bert/encoder/layer_9/attention/self/sub" [id=144, type=Sub]; -"145 bert/encoder/layer_9/attention/self/mul_1" [id=145, type=Mul]; -"146 bert/encoder/layer_8/attention/self/ExpandDims" [id=146, type=Reshape]; -"147 bert/encoder/layer_8/attention/self/sub" [id=147, type=Sub]; -"148 bert/encoder/layer_8/attention/self/mul_1" [id=148, type=Mul]; -"149 bert/encoder/layer_7/attention/self/ExpandDims" [id=149, type=Reshape]; -"150 bert/encoder/layer_7/attention/self/sub" [id=150, type=Sub]; -"151 bert/encoder/layer_7/attention/self/mul_1" [id=151, type=Mul]; -"152 bert/encoder/layer_6/attention/self/ExpandDims" [id=152, type=Reshape]; -"153 bert/encoder/layer_6/attention/self/sub" [id=153, type=Sub]; -"154 bert/encoder/layer_6/attention/self/mul_1" [id=154, type=Mul]; -"155 bert/encoder/layer_5/attention/self/ExpandDims" [id=155, type=Reshape]; -"156 bert/encoder/layer_5/attention/self/sub" [id=156, type=Sub]; -"157 bert/encoder/layer_5/attention/self/mul_1" [id=157, type=Mul]; -"158 bert/encoder/layer_4/attention/self/ExpandDims" [id=158, type=Reshape]; 
-"159 bert/encoder/layer_4/attention/self/sub" [id=159, type=Sub]; -"160 bert/encoder/layer_4/attention/self/mul_1" [id=160, type=Mul]; -"161 bert/encoder/layer_3/attention/self/ExpandDims" [id=161, type=Reshape]; -"162 bert/encoder/layer_3/attention/self/sub" [id=162, type=Sub]; -"163 bert/encoder/layer_3/attention/self/mul_1" [id=163, type=Mul]; -"164 bert/encoder/layer_2/attention/self/ExpandDims" [id=164, type=Reshape]; -"165 bert/encoder/layer_2/attention/self/sub" [id=165, type=Sub]; -"166 bert/encoder/layer_2/attention/self/mul_1" [id=166, type=Mul]; -"167 bert/encoder/layer_11/attention/self/ExpandDims" [id=167, type=Reshape]; -"168 bert/encoder/layer_11/attention/self/sub" [id=168, type=Sub]; -"169 bert/encoder/layer_11/attention/self/mul_1" [id=169, type=Mul]; -"170 bert/encoder/layer_10/attention/self/ExpandDims" [id=170, type=Reshape]; -"171 bert/encoder/layer_10/attention/self/sub" [id=171, type=Sub]; -"172 bert/encoder/layer_10/attention/self/mul_1" [id=172, type=Mul]; -"173 bert/encoder/layer_1/attention/self/ExpandDims" [id=173, type=Reshape]; -"174 bert/encoder/layer_1/attention/self/sub" [id=174, type=Sub]; -"175 bert/encoder/layer_1/attention/self/mul_1" [id=175, type=Mul]; -"176 bert/encoder/layer_0/attention/self/ExpandDims" [id=176, type=Reshape]; -"177 bert/encoder/layer_0/attention/self/sub" [id=177, type=Sub]; -"178 bert/encoder/layer_0/attention/self/mul_1" [id=178, type=Mul]; -"179 bert/embeddings/Slice" [id=179, type=Slice]; -"180 bert/embeddings/Reshape_4__42" [id=180, type=Cast]; -"181 bert/embeddings/Reshape_4" [id=181, type=Reshape]; -"182 bert/embeddings/Reshape_3/shape_Unsqueeze__69" [id=182, type=Unsqueeze]; -"183 bert/embeddings/Reshape_3/shape_Unsqueeze__68" [id=183, type=Unsqueeze]; -"184 bert/embeddings/Reshape_2__43" [id=184, type=Cast]; -"185 bert/embeddings/Reshape_2" [id=185, type=Reshape]; -"186 bert/embeddings/Reshape_1/shape_Unsqueeze__57" [id=186, type=Unsqueeze]; -"187 bert/embeddings/Reshape_1/shape_Unsqueeze__56" [id=187, type=Unsqueeze]; -"188 bert/embeddings/Reshape__59" [id=188, type=Cast]; -"189 bert/embeddings/ExpandDims" [id=189, type=Reshape]; -"190 bert/embeddings/Shape" [id=190, type=Shape]; -"191 bert/embeddings/Shape__49" [id=191, type=Cast]; -"192 bert/embeddings/strided_slice" [id=192, type=Slice]; -"193 bert/embeddings/strided_slice__53" [id=193, type=Squeeze]; -"194 bert/embeddings/strided_slice__54" [id=194, type=Cast]; -"195 bert/embeddings/Reshape_1/shape_Unsqueeze__55" [id=195, type=Unsqueeze]; -"196 bert/embeddings/Reshape_1/shape_Concat__58" [id=196, type=Concat]; -"197 bert/embeddings/Reshape_1__60" [id=197, type=Cast]; -"198 bert/embeddings/Reshape" [id=198, type=Reshape]; -"199 QuantizeLinear_bert/embeddings/word_embeddings^0_1" [id=199, label="199 QuantizeLinear_bert/embeddings/word_embeddings:0_1", type=QuantizeLinear]; -"200 DequantizeLinear_bert/embeddings/word_embeddings^0_1" [id=200, label="200 DequantizeLinear_bert/embeddings/word_embeddings:0_1", type=DequantizeLinear]; -"201 bert/embeddings/GatherV2" [id=201, type=Gather]; -"202 bert/embeddings/Reshape_1" [id=202, type=Reshape]; -"203 bert/embeddings/Shape_1" [id=203, type=Shape]; -"204 bert/embeddings/Shape_1__61" [id=204, type=Cast]; -"205 bert/embeddings/strided_slice_1" [id=205, type=Slice]; -"206 bert/embeddings/strided_slice_1__65" [id=206, type=Squeeze]; -"207 bert/embeddings/strided_slice_1__66" [id=207, type=Cast]; -"208 bert/embeddings/Reshape_3/shape_Unsqueeze__67" [id=208, type=Unsqueeze]; -"209 bert/embeddings/Reshape_3/shape_Concat__70" 
[id=209, type=Concat]; -"210 bert/embeddings/Reshape_3__71" [id=210, type=Cast]; -"211 Unsqueeze__46" [id=211, type=Unsqueeze]; -"212 Unsqueeze__45" [id=212, type=Unsqueeze]; -"213 Unsqueeze__44" [id=213, type=Unsqueeze]; -"214 Reshape_1/shape_Unsqueeze__480" [id=214, type=Unsqueeze]; -"215 Reshape_1/shape_Unsqueeze__479" [id=215, type=Unsqueeze]; -"216 Reshape/shape_Unsqueeze__483" [id=216, type=Unsqueeze]; -"217 MatMul__486" [id=217, type=Transpose]; -"218 Concat__47" [id=218, type=Concat]; -"219 bert/embeddings/one_hot" [id=219, type=OneHot]; -"220 QuantizeLinear_bert/embeddings/one_hot^0_1" [id=220, label="220 QuantizeLinear_bert/embeddings/one_hot:0_1", type=QuantizeLinear]; -"221 DequantizeLinear_bert/embeddings/one_hot^0_1" [id=221, label="221 DequantizeLinear_bert/embeddings/one_hot:0_1", type=DequantizeLinear]; -"222 QuantizeLinear_bert/embeddings/token_type_embeddings^0_1" [id=222, label="222 QuantizeLinear_bert/embeddings/token_type_embeddings:0_1", type=QuantizeLinear]; -"223 DequantizeLinear_bert/embeddings/token_type_embeddings^0_1" [id=223, label="223 DequantizeLinear_bert/embeddings/token_type_embeddings:0_1", type=DequantizeLinear]; -"224 bert/embeddings/MatMul" [id=224, type=MatMul]; -"225 bert/embeddings/Reshape_3" [id=225, type=Reshape]; -"226 bert/embeddings/add" [id=226, type=Add]; -"227 bert/embeddings/add_1" [id=227, type=Add]; -"228 bert/embeddings/LayerNorm/moments/mean" [id=228, type=ReduceMean]; -"229 bert/embeddings/LayerNorm/moments/StopGradient" [id=229, type=Identity]; -"230 bert/embeddings/LayerNorm/moments/SquaredDifference" [id=230, type=Sub]; -"231 bert/embeddings/LayerNorm/moments/SquaredDifference__72" [id=231, type=Mul]; -"232 bert/embeddings/LayerNorm/moments/variance" [id=232, type=ReduceMean]; -"233 bert/embeddings/LayerNorm/batchnorm/add" [id=233, type=Add]; -"234 bert/embeddings/LayerNorm/batchnorm/Rsqrt" [id=234, type=Sqrt]; -"235 bert/embeddings/LayerNorm/batchnorm/Rsqrt__74" [id=235, type=Reciprocal]; -"236 bert/embeddings/LayerNorm/batchnorm/mul" [id=236, type=Mul]; -"237 bert/embeddings/LayerNorm/batchnorm/mul_2" [id=237, type=Mul]; -"238 bert/embeddings/LayerNorm/batchnorm/sub" [id=238, type=Sub]; -"239 bert/embeddings/LayerNorm/batchnorm/mul_1" [id=239, type=Mul]; -"240 bert/embeddings/LayerNorm/batchnorm/add_1" [id=240, type=Add]; -"241 bert/encoder/Shape_2" [id=241, type=Shape]; -"242 bert/encoder/Shape_2__76" [id=242, type=Cast]; -"243 bert/encoder/strided_slice_2" [id=243, type=Slice]; -"244 bert/encoder/strided_slice_2__80" [id=244, type=Squeeze]; -"245 bert/encoder/strided_slice_2__81" [id=245, type=Cast]; -"246 bert/encoder/layer_9/attention/self/mul_2" [id=246, type=Mul]; -"247 bert/encoder/layer_9/attention/self/Reshape_3/shape_Unsqueeze__82" [id=247, type=Unsqueeze]; -"248 bert/encoder/layer_9/attention/self/Reshape_3/shape_Concat__84" [id=248, type=Concat]; -"249 bert/encoder/layer_9/attention/self/Reshape_3__434" [id=249, type=Cast]; -"250 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__85" [id=250, type=Unsqueeze]; -"251 bert/encoder/layer_9/attention/self/Reshape_2/shape_Concat__89" [id=251, type=Concat]; -"252 bert/encoder/layer_9/attention/self/Reshape_2__429" [id=252, type=Cast]; -"253 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__90" [id=253, type=Unsqueeze]; -"254 bert/encoder/layer_9/attention/self/Reshape_1/shape_Concat__94" [id=254, type=Concat]; -"255 bert/encoder/layer_9/attention/self/Reshape_1__431" [id=255, type=Cast]; -"256 
bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__95" [id=256, type=Unsqueeze]; -"257 bert/encoder/layer_9/attention/self/Reshape/shape_Concat__99" [id=257, type=Concat]; -"258 bert/encoder/layer_9/attention/self/Reshape__430" [id=258, type=Cast]; -"259 bert/encoder/layer_8/attention/self/mul_2" [id=259, type=Mul]; -"260 bert/encoder/layer_8/attention/self/Reshape_3/shape_Unsqueeze__100" [id=260, type=Unsqueeze]; -"261 bert/encoder/layer_8/attention/self/Reshape_3/shape_Concat__102" [id=261, type=Concat]; -"262 bert/encoder/layer_8/attention/self/Reshape_3__420" [id=262, type=Cast]; -"263 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__103" [id=263, type=Unsqueeze]; -"264 bert/encoder/layer_8/attention/self/Reshape_2/shape_Concat__107" [id=264, type=Concat]; -"265 bert/encoder/layer_8/attention/self/Reshape_2__415" [id=265, type=Cast]; -"266 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__108" [id=266, type=Unsqueeze]; -"267 bert/encoder/layer_8/attention/self/Reshape_1/shape_Concat__112" [id=267, type=Concat]; -"268 bert/encoder/layer_8/attention/self/Reshape_1__417" [id=268, type=Cast]; -"269 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__113" [id=269, type=Unsqueeze]; -"270 bert/encoder/layer_8/attention/self/Reshape/shape_Concat__117" [id=270, type=Concat]; -"271 bert/encoder/layer_8/attention/self/Reshape__416" [id=271, type=Cast]; -"272 bert/encoder/layer_7/attention/self/mul_2" [id=272, type=Mul]; -"273 bert/encoder/layer_7/attention/self/Reshape_3/shape_Unsqueeze__118" [id=273, type=Unsqueeze]; -"274 bert/encoder/layer_7/attention/self/Reshape_3/shape_Concat__120" [id=274, type=Concat]; -"275 bert/encoder/layer_7/attention/self/Reshape_3__406" [id=275, type=Cast]; -"276 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__121" [id=276, type=Unsqueeze]; -"277 bert/encoder/layer_7/attention/self/Reshape_2/shape_Concat__125" [id=277, type=Concat]; -"278 bert/encoder/layer_7/attention/self/Reshape_2__401" [id=278, type=Cast]; -"279 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__126" [id=279, type=Unsqueeze]; -"280 bert/encoder/layer_7/attention/self/Reshape_1/shape_Concat__130" [id=280, type=Concat]; -"281 bert/encoder/layer_7/attention/self/Reshape_1__403" [id=281, type=Cast]; -"282 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__131" [id=282, type=Unsqueeze]; -"283 bert/encoder/layer_7/attention/self/Reshape/shape_Concat__135" [id=283, type=Concat]; -"284 bert/encoder/layer_7/attention/self/Reshape__402" [id=284, type=Cast]; -"285 bert/encoder/layer_6/attention/self/mul_2" [id=285, type=Mul]; -"286 bert/encoder/layer_6/attention/self/Reshape_3/shape_Unsqueeze__136" [id=286, type=Unsqueeze]; -"287 bert/encoder/layer_6/attention/self/Reshape_3/shape_Concat__138" [id=287, type=Concat]; -"288 bert/encoder/layer_6/attention/self/Reshape_3__392" [id=288, type=Cast]; -"289 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__139" [id=289, type=Unsqueeze]; -"290 bert/encoder/layer_6/attention/self/Reshape_2/shape_Concat__143" [id=290, type=Concat]; -"291 bert/encoder/layer_6/attention/self/Reshape_2__387" [id=291, type=Cast]; -"292 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__144" [id=292, type=Unsqueeze]; -"293 bert/encoder/layer_6/attention/self/Reshape_1/shape_Concat__148" [id=293, type=Concat]; -"294 bert/encoder/layer_6/attention/self/Reshape_1__389" [id=294, type=Cast]; -"295 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__149" [id=295, type=Unsqueeze]; 
-"296 bert/encoder/layer_6/attention/self/Reshape/shape_Concat__153" [id=296, type=Concat]; -"297 bert/encoder/layer_6/attention/self/Reshape__388" [id=297, type=Cast]; -"298 bert/encoder/layer_5/attention/self/mul_2" [id=298, type=Mul]; -"299 bert/encoder/layer_5/attention/self/Reshape_3/shape_Unsqueeze__154" [id=299, type=Unsqueeze]; -"300 bert/encoder/layer_5/attention/self/Reshape_3/shape_Concat__156" [id=300, type=Concat]; -"301 bert/encoder/layer_5/attention/self/Reshape_3__378" [id=301, type=Cast]; -"302 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__157" [id=302, type=Unsqueeze]; -"303 bert/encoder/layer_5/attention/self/Reshape_2/shape_Concat__161" [id=303, type=Concat]; -"304 bert/encoder/layer_5/attention/self/Reshape_2__373" [id=304, type=Cast]; -"305 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__162" [id=305, type=Unsqueeze]; -"306 bert/encoder/layer_5/attention/self/Reshape_1/shape_Concat__166" [id=306, type=Concat]; -"307 bert/encoder/layer_5/attention/self/Reshape_1__375" [id=307, type=Cast]; -"308 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__167" [id=308, type=Unsqueeze]; -"309 bert/encoder/layer_5/attention/self/Reshape/shape_Concat__171" [id=309, type=Concat]; -"310 bert/encoder/layer_5/attention/self/Reshape__374" [id=310, type=Cast]; -"311 bert/encoder/layer_4/attention/self/mul_2" [id=311, type=Mul]; -"312 bert/encoder/layer_4/attention/self/Reshape_3/shape_Unsqueeze__172" [id=312, type=Unsqueeze]; -"313 bert/encoder/layer_4/attention/self/Reshape_3/shape_Concat__174" [id=313, type=Concat]; -"314 bert/encoder/layer_4/attention/self/Reshape_3__364" [id=314, type=Cast]; -"315 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__175" [id=315, type=Unsqueeze]; -"316 bert/encoder/layer_4/attention/self/Reshape_2/shape_Concat__179" [id=316, type=Concat]; -"317 bert/encoder/layer_4/attention/self/Reshape_2__359" [id=317, type=Cast]; -"318 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__180" [id=318, type=Unsqueeze]; -"319 bert/encoder/layer_4/attention/self/Reshape_1/shape_Concat__184" [id=319, type=Concat]; -"320 bert/encoder/layer_4/attention/self/Reshape_1__361" [id=320, type=Cast]; -"321 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__185" [id=321, type=Unsqueeze]; -"322 bert/encoder/layer_4/attention/self/Reshape/shape_Concat__189" [id=322, type=Concat]; -"323 bert/encoder/layer_4/attention/self/Reshape__360" [id=323, type=Cast]; -"324 bert/encoder/layer_3/attention/self/mul_2" [id=324, type=Mul]; -"325 bert/encoder/layer_3/attention/self/Reshape_3/shape_Unsqueeze__190" [id=325, type=Unsqueeze]; -"326 bert/encoder/layer_3/attention/self/Reshape_3/shape_Concat__192" [id=326, type=Concat]; -"327 bert/encoder/layer_3/attention/self/Reshape_3__350" [id=327, type=Cast]; -"328 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__193" [id=328, type=Unsqueeze]; -"329 bert/encoder/layer_3/attention/self/Reshape_2/shape_Concat__197" [id=329, type=Concat]; -"330 bert/encoder/layer_3/attention/self/Reshape_2__345" [id=330, type=Cast]; -"331 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__198" [id=331, type=Unsqueeze]; -"332 bert/encoder/layer_3/attention/self/Reshape_1/shape_Concat__202" [id=332, type=Concat]; -"333 bert/encoder/layer_3/attention/self/Reshape_1__347" [id=333, type=Cast]; -"334 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__203" [id=334, type=Unsqueeze]; -"335 bert/encoder/layer_3/attention/self/Reshape/shape_Concat__207" [id=335, 
type=Concat]; -"336 bert/encoder/layer_3/attention/self/Reshape__346" [id=336, type=Cast]; -"337 bert/encoder/layer_2/attention/self/mul_2" [id=337, type=Mul]; -"338 bert/encoder/layer_2/attention/self/Reshape_3/shape_Unsqueeze__208" [id=338, type=Unsqueeze]; -"339 bert/encoder/layer_2/attention/self/Reshape_3/shape_Concat__210" [id=339, type=Concat]; -"340 bert/encoder/layer_2/attention/self/Reshape_3__336" [id=340, type=Cast]; -"341 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__211" [id=341, type=Unsqueeze]; -"342 bert/encoder/layer_2/attention/self/Reshape_2/shape_Concat__215" [id=342, type=Concat]; -"343 bert/encoder/layer_2/attention/self/Reshape_2__331" [id=343, type=Cast]; -"344 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__216" [id=344, type=Unsqueeze]; -"345 bert/encoder/layer_2/attention/self/Reshape_1/shape_Concat__220" [id=345, type=Concat]; -"346 bert/encoder/layer_2/attention/self/Reshape_1__333" [id=346, type=Cast]; -"347 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__221" [id=347, type=Unsqueeze]; -"348 bert/encoder/layer_2/attention/self/Reshape/shape_Concat__225" [id=348, type=Concat]; -"349 bert/encoder/layer_2/attention/self/Reshape__332" [id=349, type=Cast]; -"350 bert/encoder/layer_11/attention/self/mul_2" [id=350, type=Mul]; -"351 bert/encoder/layer_11/attention/self/Reshape_3/shape_Unsqueeze__226" [id=351, type=Unsqueeze]; -"352 bert/encoder/layer_11/attention/self/Reshape_3/shape_Concat__228" [id=352, type=Concat]; -"353 bert/encoder/layer_11/attention/self/Reshape_3__462" [id=353, type=Cast]; -"354 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__229" [id=354, type=Unsqueeze]; -"355 bert/encoder/layer_11/attention/self/Reshape_2/shape_Concat__233" [id=355, type=Concat]; -"356 bert/encoder/layer_11/attention/self/Reshape_2__457" [id=356, type=Cast]; -"357 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__234" [id=357, type=Unsqueeze]; -"358 bert/encoder/layer_11/attention/self/Reshape_1/shape_Concat__238" [id=358, type=Concat]; -"359 bert/encoder/layer_11/attention/self/Reshape_1__459" [id=359, type=Cast]; -"360 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__239" [id=360, type=Unsqueeze]; -"361 bert/encoder/layer_11/attention/self/Reshape/shape_Concat__243" [id=361, type=Concat]; -"362 bert/encoder/layer_11/attention/self/Reshape__458" [id=362, type=Cast]; -"363 bert/encoder/layer_10/attention/self/mul_2" [id=363, type=Mul]; -"364 bert/encoder/layer_10/attention/self/Reshape_3/shape_Unsqueeze__244" [id=364, type=Unsqueeze]; -"365 bert/encoder/layer_10/attention/self/Reshape_3/shape_Concat__246" [id=365, type=Concat]; -"366 bert/encoder/layer_10/attention/self/Reshape_3__448" [id=366, type=Cast]; -"367 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__247" [id=367, type=Unsqueeze]; -"368 bert/encoder/layer_10/attention/self/Reshape_2/shape_Concat__251" [id=368, type=Concat]; -"369 bert/encoder/layer_10/attention/self/Reshape_2__443" [id=369, type=Cast]; -"370 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__252" [id=370, type=Unsqueeze]; -"371 bert/encoder/layer_10/attention/self/Reshape_1/shape_Concat__256" [id=371, type=Concat]; -"372 bert/encoder/layer_10/attention/self/Reshape_1__445" [id=372, type=Cast]; -"373 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__257" [id=373, type=Unsqueeze]; -"374 bert/encoder/layer_10/attention/self/Reshape/shape_Concat__261" [id=374, type=Concat]; -"375 
bert/encoder/layer_10/attention/self/Reshape__444" [id=375, type=Cast]; -"376 bert/encoder/layer_1/attention/self/mul_2" [id=376, type=Mul]; -"377 bert/encoder/layer_1/attention/self/Reshape_3/shape_Unsqueeze__262" [id=377, type=Unsqueeze]; -"378 bert/encoder/layer_1/attention/self/Reshape_3/shape_Concat__264" [id=378, type=Concat]; -"379 bert/encoder/layer_1/attention/self/Reshape_3__322" [id=379, type=Cast]; -"380 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__265" [id=380, type=Unsqueeze]; -"381 bert/encoder/layer_1/attention/self/Reshape_2/shape_Concat__269" [id=381, type=Concat]; -"382 bert/encoder/layer_1/attention/self/Reshape_2__317" [id=382, type=Cast]; -"383 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__270" [id=383, type=Unsqueeze]; -"384 bert/encoder/layer_1/attention/self/Reshape_1/shape_Concat__274" [id=384, type=Concat]; -"385 bert/encoder/layer_1/attention/self/Reshape_1__319" [id=385, type=Cast]; -"386 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__275" [id=386, type=Unsqueeze]; -"387 bert/encoder/layer_1/attention/self/Reshape/shape_Concat__279" [id=387, type=Concat]; -"388 bert/encoder/layer_1/attention/self/Reshape__318" [id=388, type=Cast]; -"389 bert/encoder/layer_0/attention/self/mul_2" [id=389, type=Mul]; -"390 bert/encoder/layer_0/attention/self/Reshape_3/shape_Unsqueeze__280" [id=390, type=Unsqueeze]; -"391 bert/encoder/layer_0/attention/self/Reshape_3/shape_Concat__282" [id=391, type=Concat]; -"392 bert/encoder/layer_0/attention/self/Reshape_3__308" [id=392, type=Cast]; -"393 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__283" [id=393, type=Unsqueeze]; -"394 bert/encoder/layer_0/attention/self/Reshape_2/shape_Concat__287" [id=394, type=Concat]; -"395 bert/encoder/layer_0/attention/self/Reshape_2__303" [id=395, type=Cast]; -"396 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__288" [id=396, type=Unsqueeze]; -"397 bert/encoder/layer_0/attention/self/Reshape_1/shape_Concat__292" [id=397, type=Concat]; -"398 bert/encoder/layer_0/attention/self/Reshape_1__305" [id=398, type=Cast]; -"399 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__293" [id=399, type=Unsqueeze]; -"400 bert/encoder/layer_0/attention/self/Reshape/shape_Concat__297" [id=400, type=Concat]; -"401 bert/encoder/layer_0/attention/self/Reshape__304" [id=401, type=Cast]; -"402 bert/encoder/Reshape_13/shape_Unsqueeze__298" [id=402, type=Unsqueeze]; -"403 bert/encoder/Reshape_13/shape_Concat__301" [id=403, type=Concat]; -"404 bert/encoder/Reshape_13__471" [id=404, type=Cast]; -"405 bert/encoder/Reshape_1" [id=405, type=Reshape]; -"406 QuantizeLinear_bert/encoder/Reshape_1^0_1" [id=406, label="406 QuantizeLinear_bert/encoder/Reshape_1:0_1", type=QuantizeLinear]; -"407 DequantizeLinear_bert/encoder/Reshape_1^0_1" [id=407, label="407 DequantizeLinear_bert/encoder/Reshape_1:0_1", type=DequantizeLinear]; -"408 QuantizeLinear_bert/encoder/layer_0/attention/self/value/kernel^0_1" [id=408, label="408 QuantizeLinear_bert/encoder/layer_0/attention/self/value/kernel:0_1", type=QuantizeLinear]; -"409 DequantizeLinear_bert/encoder/layer_0/attention/self/value/kernel^0_1" [id=409, label="409 DequantizeLinear_bert/encoder/layer_0/attention/self/value/kernel:0_1", type=DequantizeLinear]; -"410 QuantizeLinear_bert/encoder/Reshape_1^0_2" [id=410, label="410 QuantizeLinear_bert/encoder/Reshape_1:0_2", type=QuantizeLinear]; -"411 DequantizeLinear_bert/encoder/Reshape_1^0_2" [id=411, label="411 DequantizeLinear_bert/encoder/Reshape_1:0_2", 
type=DequantizeLinear]; -"412 QuantizeLinear_bert/encoder/Reshape_1^0_3" [id=412, label="412 QuantizeLinear_bert/encoder/Reshape_1:0_3", type=QuantizeLinear]; -"413 DequantizeLinear_bert/encoder/Reshape_1^0_3" [id=413, label="413 DequantizeLinear_bert/encoder/Reshape_1:0_3", type=DequantizeLinear]; -"414 bert/encoder/layer_0/attention/self/value/MatMul" [id=414, type=MatMul]; -"415 bert/encoder/layer_0/attention/self/value/BiasAdd" [id=415, type=Add]; -"416 bert/encoder/layer_0/attention/self/Reshape_2" [id=416, type=Reshape]; -"417 bert/encoder/layer_0/attention/self/transpose_2" [id=417, type=Transpose]; -"418 QuantizeLinear_bert/encoder/layer_0/attention/self/query/kernel^0_1" [id=418, label="418 QuantizeLinear_bert/encoder/layer_0/attention/self/query/kernel:0_1", type=QuantizeLinear]; -"419 DequantizeLinear_bert/encoder/layer_0/attention/self/query/kernel^0_1" [id=419, label="419 DequantizeLinear_bert/encoder/layer_0/attention/self/query/kernel:0_1", type=DequantizeLinear]; -"420 bert/encoder/layer_0/attention/self/query/MatMul" [id=420, type=MatMul]; -"421 bert/encoder/layer_0/attention/self/query/BiasAdd" [id=421, type=Add]; -"422 QuantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd^0_1" [id=422, label="422 QuantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; -"423 DequantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd^0_1" [id=423, label="423 DequantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; -"424 bert/encoder/layer_0/attention/self/Reshape" [id=424, type=Reshape]; -"425 bert/encoder/layer_0/attention/self/transpose" [id=425, type=Transpose]; -"426 QuantizeLinear_bert/encoder/layer_0/attention/self/key/kernel^0_1" [id=426, label="426 QuantizeLinear_bert/encoder/layer_0/attention/self/key/kernel:0_1", type=QuantizeLinear]; -"427 DequantizeLinear_bert/encoder/layer_0/attention/self/key/kernel^0_1" [id=427, label="427 DequantizeLinear_bert/encoder/layer_0/attention/self/key/kernel:0_1", type=DequantizeLinear]; -"428 bert/encoder/layer_0/attention/self/key/MatMul" [id=428, type=MatMul]; -"429 bert/encoder/layer_0/attention/self/key/BiasAdd" [id=429, type=Add]; -"430 QuantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd^0_1" [id=430, label="430 QuantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; -"431 DequantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd^0_1" [id=431, label="431 DequantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; -"432 bert/encoder/layer_0/attention/self/Reshape_1" [id=432, type=Reshape]; -"433 bert/encoder/layer_0/attention/self/transpose_1" [id=433, type=Transpose]; -"434 bert/encoder/layer_0/attention/self/MatMul__306" [id=434, type=Transpose]; -"435 bert/encoder/layer_0/attention/self/MatMul" [id=435, type=MatMul]; -"436 bert/encoder/layer_0/attention/self/Mul" [id=436, type=Mul]; -"437 bert/encoder/layer_0/attention/self/add" [id=437, type=Add]; -"438 bert/encoder/layer_0/attention/self/Softmax" [id=438, type=Softmax]; -"439 bert/encoder/layer_0/attention/self/MatMul_1" [id=439, type=MatMul]; -"440 QuantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1^0_1" [id=440, label="440 QuantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1:0_1", type=QuantizeLinear]; -"441 DequantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1^0_1" [id=441, label="441 DequantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1:0_1", type=DequantizeLinear]; -"442 
bert/encoder/layer_0/attention/self/transpose_3" [id=442, type=Transpose]; -"443 bert/encoder/layer_0/attention/self/Reshape_3" [id=443, type=Reshape]; -"444 QuantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel^0_1" [id=444, label="444 QuantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel:0_1", type=QuantizeLinear]; -"445 DequantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel^0_1" [id=445, label="445 DequantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel:0_1", type=DequantizeLinear]; -"446 bert/encoder/layer_0/attention/output/dense/MatMul" [id=446, type=MatMul]; -"447 bert/encoder/layer_0/attention/output/dense/BiasAdd" [id=447, type=Add]; -"448 bert/encoder/layer_0/attention/output/add" [id=448, type=Add]; -"449 bert/encoder/layer_0/attention/output/LayerNorm/moments/mean" [id=449, type=ReduceMean]; -"450 bert/encoder/layer_0/attention/output/LayerNorm/moments/StopGradient" [id=450, type=Identity]; -"451 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference" [id=451, type=Sub]; -"452 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference__309" [id=452, type=Mul]; -"453 bert/encoder/layer_0/attention/output/LayerNorm/moments/variance" [id=453, type=ReduceMean]; -"454 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add" [id=454, type=Add]; -"455 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/Rsqrt" [id=455, type=Sqrt]; -"456 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/Rsqrt__311" [id=456, type=Reciprocal]; -"457 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul" [id=457, type=Mul]; -"458 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_2" [id=458, type=Mul]; -"459 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/sub" [id=459, type=Sub]; -"460 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_1" [id=460, type=Mul]; -"461 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1" [id=461, type=Add]; -"462 QuantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=462, label="462 QuantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"463 DequantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=463, label="463 DequantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"464 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel^0_1" [id=464, label="464 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel:0_1", type=QuantizeLinear]; -"465 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel^0_1" [id=465, label="465 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel:0_1", type=DequantizeLinear]; -"466 bert/encoder/layer_0/intermediate/dense/MatMul" [id=466, type=MatMul]; -"467 bert/encoder/layer_0/intermediate/dense/BiasAdd" [id=467, type=Add]; -"468 bert/encoder/layer_0/intermediate/dense/Pow" [id=468, type=Pow]; -"469 bert/encoder/layer_0/intermediate/dense/mul" [id=469, type=Mul]; -"470 bert/encoder/layer_0/intermediate/dense/add" [id=470, type=Add]; -"471 bert/encoder/layer_0/intermediate/dense/mul_1" [id=471, type=Mul]; -"472 bert/encoder/layer_0/intermediate/dense/Tanh" [id=472, type=Tanh]; -"473 bert/encoder/layer_0/intermediate/dense/add_1" [id=473, type=Add]; -"474 bert/encoder/layer_0/intermediate/dense/mul_2" [id=474, type=Mul]; -"475 bert/encoder/layer_0/intermediate/dense/mul_3" [id=475, type=Mul]; -"476 
QuantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3^0_1" [id=476, label="476 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; -"477 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3^0_1" [id=477, label="477 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; -"478 QuantizeLinear_bert/encoder/layer_0/output/dense/kernel^0_1" [id=478, label="478 QuantizeLinear_bert/encoder/layer_0/output/dense/kernel:0_1", type=QuantizeLinear]; -"479 DequantizeLinear_bert/encoder/layer_0/output/dense/kernel^0_1" [id=479, label="479 DequantizeLinear_bert/encoder/layer_0/output/dense/kernel:0_1", type=DequantizeLinear]; -"480 bert/encoder/layer_0/output/dense/MatMul" [id=480, type=MatMul]; -"481 bert/encoder/layer_0/output/dense/BiasAdd" [id=481, type=Add]; -"482 bert/encoder/layer_0/output/add" [id=482, type=Add]; -"483 bert/encoder/layer_0/output/LayerNorm/moments/mean" [id=483, type=ReduceMean]; -"484 bert/encoder/layer_0/output/LayerNorm/moments/StopGradient" [id=484, type=Identity]; -"485 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference" [id=485, type=Sub]; -"486 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference__313" [id=486, type=Mul]; -"487 bert/encoder/layer_0/output/LayerNorm/moments/variance" [id=487, type=ReduceMean]; -"488 bert/encoder/layer_0/output/LayerNorm/batchnorm/add" [id=488, type=Add]; -"489 bert/encoder/layer_0/output/LayerNorm/batchnorm/Rsqrt" [id=489, type=Sqrt]; -"490 bert/encoder/layer_0/output/LayerNorm/batchnorm/Rsqrt__315" [id=490, type=Reciprocal]; -"491 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul" [id=491, type=Mul]; -"492 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_2" [id=492, type=Mul]; -"493 bert/encoder/layer_0/output/LayerNorm/batchnorm/sub" [id=493, type=Sub]; -"494 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_1" [id=494, type=Mul]; -"495 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" [id=495, type=Add]; -"496 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_1" [id=496, label="496 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"497 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_1" [id=497, label="497 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"498 QuantizeLinear_bert/encoder/layer_1/attention/self/value/kernel^0_1" [id=498, label="498 QuantizeLinear_bert/encoder/layer_1/attention/self/value/kernel:0_1", type=QuantizeLinear]; -"499 DequantizeLinear_bert/encoder/layer_1/attention/self/value/kernel^0_1" [id=499, label="499 DequantizeLinear_bert/encoder/layer_1/attention/self/value/kernel:0_1", type=DequantizeLinear]; -"500 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_2" [id=500, label="500 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; -"501 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_2" [id=501, label="501 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; -"502 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_3" [id=502, label="502 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; -"503 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_3" [id=503, label="503 
DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; -"504 bert/encoder/layer_1/attention/self/value/MatMul" [id=504, type=MatMul]; -"505 bert/encoder/layer_1/attention/self/value/BiasAdd" [id=505, type=Add]; -"506 bert/encoder/layer_1/attention/self/Reshape_2" [id=506, type=Reshape]; -"507 bert/encoder/layer_1/attention/self/transpose_2" [id=507, type=Transpose]; -"508 QuantizeLinear_bert/encoder/layer_1/attention/self/query/kernel^0_1" [id=508, label="508 QuantizeLinear_bert/encoder/layer_1/attention/self/query/kernel:0_1", type=QuantizeLinear]; -"509 DequantizeLinear_bert/encoder/layer_1/attention/self/query/kernel^0_1" [id=509, label="509 DequantizeLinear_bert/encoder/layer_1/attention/self/query/kernel:0_1", type=DequantizeLinear]; -"510 bert/encoder/layer_1/attention/self/query/MatMul" [id=510, type=MatMul]; -"511 bert/encoder/layer_1/attention/self/query/BiasAdd" [id=511, type=Add]; -"512 QuantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd^0_1" [id=512, label="512 QuantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; -"513 DequantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd^0_1" [id=513, label="513 DequantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; -"514 bert/encoder/layer_1/attention/self/Reshape" [id=514, type=Reshape]; -"515 bert/encoder/layer_1/attention/self/transpose" [id=515, type=Transpose]; -"516 QuantizeLinear_bert/encoder/layer_1/attention/self/key/kernel^0_1" [id=516, label="516 QuantizeLinear_bert/encoder/layer_1/attention/self/key/kernel:0_1", type=QuantizeLinear]; -"517 DequantizeLinear_bert/encoder/layer_1/attention/self/key/kernel^0_1" [id=517, label="517 DequantizeLinear_bert/encoder/layer_1/attention/self/key/kernel:0_1", type=DequantizeLinear]; -"518 bert/encoder/layer_1/attention/self/key/MatMul" [id=518, type=MatMul]; -"519 bert/encoder/layer_1/attention/self/key/BiasAdd" [id=519, type=Add]; -"520 QuantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd^0_1" [id=520, label="520 QuantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; -"521 DequantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd^0_1" [id=521, label="521 DequantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; -"522 bert/encoder/layer_1/attention/self/Reshape_1" [id=522, type=Reshape]; -"523 bert/encoder/layer_1/attention/self/transpose_1" [id=523, type=Transpose]; -"524 bert/encoder/layer_1/attention/self/MatMul__320" [id=524, type=Transpose]; -"525 bert/encoder/layer_1/attention/self/MatMul" [id=525, type=MatMul]; -"526 bert/encoder/layer_1/attention/self/Mul" [id=526, type=Mul]; -"527 bert/encoder/layer_1/attention/self/add" [id=527, type=Add]; -"528 bert/encoder/layer_1/attention/self/Softmax" [id=528, type=Softmax]; -"529 bert/encoder/layer_1/attention/self/MatMul_1" [id=529, type=MatMul]; -"530 QuantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1^0_1" [id=530, label="530 QuantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1:0_1", type=QuantizeLinear]; -"531 DequantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1^0_1" [id=531, label="531 DequantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1:0_1", type=DequantizeLinear]; -"532 bert/encoder/layer_1/attention/self/transpose_3" [id=532, type=Transpose]; -"533 bert/encoder/layer_1/attention/self/Reshape_3" [id=533, type=Reshape]; -"534 
QuantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel^0_1" [id=534, label="534 QuantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel:0_1", type=QuantizeLinear]; -"535 DequantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel^0_1" [id=535, label="535 DequantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel:0_1", type=DequantizeLinear]; -"536 bert/encoder/layer_1/attention/output/dense/MatMul" [id=536, type=MatMul]; -"537 bert/encoder/layer_1/attention/output/dense/BiasAdd" [id=537, type=Add]; -"538 bert/encoder/layer_1/attention/output/add" [id=538, type=Add]; -"539 bert/encoder/layer_1/attention/output/LayerNorm/moments/mean" [id=539, type=ReduceMean]; -"540 bert/encoder/layer_1/attention/output/LayerNorm/moments/StopGradient" [id=540, type=Identity]; -"541 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference" [id=541, type=Sub]; -"542 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference__323" [id=542, type=Mul]; -"543 bert/encoder/layer_1/attention/output/LayerNorm/moments/variance" [id=543, type=ReduceMean]; -"544 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add" [id=544, type=Add]; -"545 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/Rsqrt" [id=545, type=Sqrt]; -"546 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/Rsqrt__325" [id=546, type=Reciprocal]; -"547 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul" [id=547, type=Mul]; -"548 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_2" [id=548, type=Mul]; -"549 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/sub" [id=549, type=Sub]; -"550 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_1" [id=550, type=Mul]; -"551 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1" [id=551, type=Add]; -"552 QuantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=552, label="552 QuantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"553 DequantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=553, label="553 DequantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"554 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel^0_1" [id=554, label="554 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel:0_1", type=QuantizeLinear]; -"555 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel^0_1" [id=555, label="555 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel:0_1", type=DequantizeLinear]; -"556 bert/encoder/layer_1/intermediate/dense/MatMul" [id=556, type=MatMul]; -"557 bert/encoder/layer_1/intermediate/dense/BiasAdd" [id=557, type=Add]; -"558 bert/encoder/layer_1/intermediate/dense/Pow" [id=558, type=Pow]; -"559 bert/encoder/layer_1/intermediate/dense/mul" [id=559, type=Mul]; -"560 bert/encoder/layer_1/intermediate/dense/add" [id=560, type=Add]; -"561 bert/encoder/layer_1/intermediate/dense/mul_1" [id=561, type=Mul]; -"562 bert/encoder/layer_1/intermediate/dense/Tanh" [id=562, type=Tanh]; -"563 bert/encoder/layer_1/intermediate/dense/add_1" [id=563, type=Add]; -"564 bert/encoder/layer_1/intermediate/dense/mul_2" [id=564, type=Mul]; -"565 bert/encoder/layer_1/intermediate/dense/mul_3" [id=565, type=Mul]; -"566 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3^0_1" [id=566, label="566 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3:0_1", 
type=QuantizeLinear]; -"567 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3^0_1" [id=567, label="567 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; -"568 QuantizeLinear_bert/encoder/layer_1/output/dense/kernel^0_1" [id=568, label="568 QuantizeLinear_bert/encoder/layer_1/output/dense/kernel:0_1", type=QuantizeLinear]; -"569 DequantizeLinear_bert/encoder/layer_1/output/dense/kernel^0_1" [id=569, label="569 DequantizeLinear_bert/encoder/layer_1/output/dense/kernel:0_1", type=DequantizeLinear]; -"570 bert/encoder/layer_1/output/dense/MatMul" [id=570, type=MatMul]; -"571 bert/encoder/layer_1/output/dense/BiasAdd" [id=571, type=Add]; -"572 bert/encoder/layer_1/output/add" [id=572, type=Add]; -"573 bert/encoder/layer_1/output/LayerNorm/moments/mean" [id=573, type=ReduceMean]; -"574 bert/encoder/layer_1/output/LayerNorm/moments/StopGradient" [id=574, type=Identity]; -"575 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference" [id=575, type=Sub]; -"576 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference__327" [id=576, type=Mul]; -"577 bert/encoder/layer_1/output/LayerNorm/moments/variance" [id=577, type=ReduceMean]; -"578 bert/encoder/layer_1/output/LayerNorm/batchnorm/add" [id=578, type=Add]; -"579 bert/encoder/layer_1/output/LayerNorm/batchnorm/Rsqrt" [id=579, type=Sqrt]; -"580 bert/encoder/layer_1/output/LayerNorm/batchnorm/Rsqrt__329" [id=580, type=Reciprocal]; -"581 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul" [id=581, type=Mul]; -"582 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_2" [id=582, type=Mul]; -"583 bert/encoder/layer_1/output/LayerNorm/batchnorm/sub" [id=583, type=Sub]; -"584 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_1" [id=584, type=Mul]; -"585 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" [id=585, type=Add]; -"586 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_1" [id=586, label="586 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"587 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_1" [id=587, label="587 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"588 QuantizeLinear_bert/encoder/layer_2/attention/self/value/kernel^0_1" [id=588, label="588 QuantizeLinear_bert/encoder/layer_2/attention/self/value/kernel:0_1", type=QuantizeLinear]; -"589 DequantizeLinear_bert/encoder/layer_2/attention/self/value/kernel^0_1" [id=589, label="589 DequantizeLinear_bert/encoder/layer_2/attention/self/value/kernel:0_1", type=DequantizeLinear]; -"590 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_2" [id=590, label="590 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; -"591 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_2" [id=591, label="591 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; -"592 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_3" [id=592, label="592 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; -"593 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_3" [id=593, label="593 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; -"594 bert/encoder/layer_2/attention/self/value/MatMul" [id=594, type=MatMul]; -"595 
bert/encoder/layer_2/attention/self/value/BiasAdd" [id=595, type=Add]; -"596 bert/encoder/layer_2/attention/self/Reshape_2" [id=596, type=Reshape]; -"597 bert/encoder/layer_2/attention/self/transpose_2" [id=597, type=Transpose]; -"598 QuantizeLinear_bert/encoder/layer_2/attention/self/query/kernel^0_1" [id=598, label="598 QuantizeLinear_bert/encoder/layer_2/attention/self/query/kernel:0_1", type=QuantizeLinear]; -"599 DequantizeLinear_bert/encoder/layer_2/attention/self/query/kernel^0_1" [id=599, label="599 DequantizeLinear_bert/encoder/layer_2/attention/self/query/kernel:0_1", type=DequantizeLinear]; -"600 bert/encoder/layer_2/attention/self/query/MatMul" [id=600, type=MatMul]; -"601 bert/encoder/layer_2/attention/self/query/BiasAdd" [id=601, type=Add]; -"602 QuantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd^0_1" [id=602, label="602 QuantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; -"603 DequantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd^0_1" [id=603, label="603 DequantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; -"604 bert/encoder/layer_2/attention/self/Reshape" [id=604, type=Reshape]; -"605 bert/encoder/layer_2/attention/self/transpose" [id=605, type=Transpose]; -"606 QuantizeLinear_bert/encoder/layer_2/attention/self/key/kernel^0_1" [id=606, label="606 QuantizeLinear_bert/encoder/layer_2/attention/self/key/kernel:0_1", type=QuantizeLinear]; -"607 DequantizeLinear_bert/encoder/layer_2/attention/self/key/kernel^0_1" [id=607, label="607 DequantizeLinear_bert/encoder/layer_2/attention/self/key/kernel:0_1", type=DequantizeLinear]; -"608 bert/encoder/layer_2/attention/self/key/MatMul" [id=608, type=MatMul]; -"609 bert/encoder/layer_2/attention/self/key/BiasAdd" [id=609, type=Add]; -"610 QuantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd^0_1" [id=610, label="610 QuantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; -"611 DequantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd^0_1" [id=611, label="611 DequantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; -"612 bert/encoder/layer_2/attention/self/Reshape_1" [id=612, type=Reshape]; -"613 bert/encoder/layer_2/attention/self/transpose_1" [id=613, type=Transpose]; -"614 bert/encoder/layer_2/attention/self/MatMul__334" [id=614, type=Transpose]; -"615 bert/encoder/layer_2/attention/self/MatMul" [id=615, type=MatMul]; -"616 bert/encoder/layer_2/attention/self/Mul" [id=616, type=Mul]; -"617 bert/encoder/layer_2/attention/self/add" [id=617, type=Add]; -"618 bert/encoder/layer_2/attention/self/Softmax" [id=618, type=Softmax]; -"619 bert/encoder/layer_2/attention/self/MatMul_1" [id=619, type=MatMul]; -"620 QuantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1^0_1" [id=620, label="620 QuantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1:0_1", type=QuantizeLinear]; -"621 DequantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1^0_1" [id=621, label="621 DequantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1:0_1", type=DequantizeLinear]; -"622 bert/encoder/layer_2/attention/self/transpose_3" [id=622, type=Transpose]; -"623 bert/encoder/layer_2/attention/self/Reshape_3" [id=623, type=Reshape]; -"624 QuantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel^0_1" [id=624, label="624 QuantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel:0_1", type=QuantizeLinear]; -"625 
DequantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel^0_1" [id=625, label="625 DequantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel:0_1", type=DequantizeLinear]; -"626 bert/encoder/layer_2/attention/output/dense/MatMul" [id=626, type=MatMul]; -"627 bert/encoder/layer_2/attention/output/dense/BiasAdd" [id=627, type=Add]; -"628 bert/encoder/layer_2/attention/output/add" [id=628, type=Add]; -"629 bert/encoder/layer_2/attention/output/LayerNorm/moments/mean" [id=629, type=ReduceMean]; -"630 bert/encoder/layer_2/attention/output/LayerNorm/moments/StopGradient" [id=630, type=Identity]; -"631 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference" [id=631, type=Sub]; -"632 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference__337" [id=632, type=Mul]; -"633 bert/encoder/layer_2/attention/output/LayerNorm/moments/variance" [id=633, type=ReduceMean]; -"634 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add" [id=634, type=Add]; -"635 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/Rsqrt" [id=635, type=Sqrt]; -"636 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/Rsqrt__339" [id=636, type=Reciprocal]; -"637 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul" [id=637, type=Mul]; -"638 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_2" [id=638, type=Mul]; -"639 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/sub" [id=639, type=Sub]; -"640 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_1" [id=640, type=Mul]; -"641 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1" [id=641, type=Add]; -"642 QuantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=642, label="642 QuantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"643 DequantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=643, label="643 DequantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"644 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel^0_1" [id=644, label="644 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel:0_1", type=QuantizeLinear]; -"645 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel^0_1" [id=645, label="645 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel:0_1", type=DequantizeLinear]; -"646 bert/encoder/layer_2/intermediate/dense/MatMul" [id=646, type=MatMul]; -"647 bert/encoder/layer_2/intermediate/dense/BiasAdd" [id=647, type=Add]; -"648 bert/encoder/layer_2/intermediate/dense/Pow" [id=648, type=Pow]; -"649 bert/encoder/layer_2/intermediate/dense/mul" [id=649, type=Mul]; -"650 bert/encoder/layer_2/intermediate/dense/add" [id=650, type=Add]; -"651 bert/encoder/layer_2/intermediate/dense/mul_1" [id=651, type=Mul]; -"652 bert/encoder/layer_2/intermediate/dense/Tanh" [id=652, type=Tanh]; -"653 bert/encoder/layer_2/intermediate/dense/add_1" [id=653, type=Add]; -"654 bert/encoder/layer_2/intermediate/dense/mul_2" [id=654, type=Mul]; -"655 bert/encoder/layer_2/intermediate/dense/mul_3" [id=655, type=Mul]; -"656 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3^0_1" [id=656, label="656 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; -"657 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3^0_1" [id=657, label="657 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3:0_1", 
type=DequantizeLinear]; -"658 QuantizeLinear_bert/encoder/layer_2/output/dense/kernel^0_1" [id=658, label="658 QuantizeLinear_bert/encoder/layer_2/output/dense/kernel:0_1", type=QuantizeLinear]; -"659 DequantizeLinear_bert/encoder/layer_2/output/dense/kernel^0_1" [id=659, label="659 DequantizeLinear_bert/encoder/layer_2/output/dense/kernel:0_1", type=DequantizeLinear]; -"660 bert/encoder/layer_2/output/dense/MatMul" [id=660, type=MatMul]; -"661 bert/encoder/layer_2/output/dense/BiasAdd" [id=661, type=Add]; -"662 bert/encoder/layer_2/output/add" [id=662, type=Add]; -"663 bert/encoder/layer_2/output/LayerNorm/moments/mean" [id=663, type=ReduceMean]; -"664 bert/encoder/layer_2/output/LayerNorm/moments/StopGradient" [id=664, type=Identity]; -"665 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference" [id=665, type=Sub]; -"666 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference__341" [id=666, type=Mul]; -"667 bert/encoder/layer_2/output/LayerNorm/moments/variance" [id=667, type=ReduceMean]; -"668 bert/encoder/layer_2/output/LayerNorm/batchnorm/add" [id=668, type=Add]; -"669 bert/encoder/layer_2/output/LayerNorm/batchnorm/Rsqrt" [id=669, type=Sqrt]; -"670 bert/encoder/layer_2/output/LayerNorm/batchnorm/Rsqrt__343" [id=670, type=Reciprocal]; -"671 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul" [id=671, type=Mul]; -"672 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_2" [id=672, type=Mul]; -"673 bert/encoder/layer_2/output/LayerNorm/batchnorm/sub" [id=673, type=Sub]; -"674 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_1" [id=674, type=Mul]; -"675 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" [id=675, type=Add]; -"676 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_1" [id=676, label="676 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"677 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_1" [id=677, label="677 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"678 QuantizeLinear_bert/encoder/layer_3/attention/self/value/kernel^0_1" [id=678, label="678 QuantizeLinear_bert/encoder/layer_3/attention/self/value/kernel:0_1", type=QuantizeLinear]; -"679 DequantizeLinear_bert/encoder/layer_3/attention/self/value/kernel^0_1" [id=679, label="679 DequantizeLinear_bert/encoder/layer_3/attention/self/value/kernel:0_1", type=DequantizeLinear]; -"680 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_2" [id=680, label="680 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; -"681 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_2" [id=681, label="681 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; -"682 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_3" [id=682, label="682 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; -"683 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_3" [id=683, label="683 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; -"684 bert/encoder/layer_3/attention/self/value/MatMul" [id=684, type=MatMul]; -"685 bert/encoder/layer_3/attention/self/value/BiasAdd" [id=685, type=Add]; -"686 bert/encoder/layer_3/attention/self/Reshape_2" [id=686, type=Reshape]; -"687 bert/encoder/layer_3/attention/self/transpose_2" [id=687, 
type=Transpose]; -"688 QuantizeLinear_bert/encoder/layer_3/attention/self/query/kernel^0_1" [id=688, label="688 QuantizeLinear_bert/encoder/layer_3/attention/self/query/kernel:0_1", type=QuantizeLinear]; -"689 DequantizeLinear_bert/encoder/layer_3/attention/self/query/kernel^0_1" [id=689, label="689 DequantizeLinear_bert/encoder/layer_3/attention/self/query/kernel:0_1", type=DequantizeLinear]; -"690 bert/encoder/layer_3/attention/self/query/MatMul" [id=690, type=MatMul]; -"691 bert/encoder/layer_3/attention/self/query/BiasAdd" [id=691, type=Add]; -"692 QuantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd^0_1" [id=692, label="692 QuantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; -"693 DequantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd^0_1" [id=693, label="693 DequantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; -"694 bert/encoder/layer_3/attention/self/Reshape" [id=694, type=Reshape]; -"695 bert/encoder/layer_3/attention/self/transpose" [id=695, type=Transpose]; -"696 QuantizeLinear_bert/encoder/layer_3/attention/self/key/kernel^0_1" [id=696, label="696 QuantizeLinear_bert/encoder/layer_3/attention/self/key/kernel:0_1", type=QuantizeLinear]; -"697 DequantizeLinear_bert/encoder/layer_3/attention/self/key/kernel^0_1" [id=697, label="697 DequantizeLinear_bert/encoder/layer_3/attention/self/key/kernel:0_1", type=DequantizeLinear]; -"698 bert/encoder/layer_3/attention/self/key/MatMul" [id=698, type=MatMul]; -"699 bert/encoder/layer_3/attention/self/key/BiasAdd" [id=699, type=Add]; -"700 QuantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd^0_1" [id=700, label="700 QuantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; -"701 DequantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd^0_1" [id=701, label="701 DequantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; -"702 bert/encoder/layer_3/attention/self/Reshape_1" [id=702, type=Reshape]; -"703 bert/encoder/layer_3/attention/self/transpose_1" [id=703, type=Transpose]; -"704 bert/encoder/layer_3/attention/self/MatMul__348" [id=704, type=Transpose]; -"705 bert/encoder/layer_3/attention/self/MatMul" [id=705, type=MatMul]; -"706 bert/encoder/layer_3/attention/self/Mul" [id=706, type=Mul]; -"707 bert/encoder/layer_3/attention/self/add" [id=707, type=Add]; -"708 bert/encoder/layer_3/attention/self/Softmax" [id=708, type=Softmax]; -"709 bert/encoder/layer_3/attention/self/MatMul_1" [id=709, type=MatMul]; -"710 QuantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1^0_1" [id=710, label="710 QuantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1:0_1", type=QuantizeLinear]; -"711 DequantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1^0_1" [id=711, label="711 DequantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1:0_1", type=DequantizeLinear]; -"712 bert/encoder/layer_3/attention/self/transpose_3" [id=712, type=Transpose]; -"713 bert/encoder/layer_3/attention/self/Reshape_3" [id=713, type=Reshape]; -"714 QuantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel^0_1" [id=714, label="714 QuantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel:0_1", type=QuantizeLinear]; -"715 DequantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel^0_1" [id=715, label="715 DequantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel:0_1", type=DequantizeLinear]; -"716 bert/encoder/layer_3/attention/output/dense/MatMul" 
[id=716, type=MatMul]; -"717 bert/encoder/layer_3/attention/output/dense/BiasAdd" [id=717, type=Add]; -"718 bert/encoder/layer_3/attention/output/add" [id=718, type=Add]; -"719 bert/encoder/layer_3/attention/output/LayerNorm/moments/mean" [id=719, type=ReduceMean]; -"720 bert/encoder/layer_3/attention/output/LayerNorm/moments/StopGradient" [id=720, type=Identity]; -"721 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference" [id=721, type=Sub]; -"722 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference__351" [id=722, type=Mul]; -"723 bert/encoder/layer_3/attention/output/LayerNorm/moments/variance" [id=723, type=ReduceMean]; -"724 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add" [id=724, type=Add]; -"725 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/Rsqrt" [id=725, type=Sqrt]; -"726 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/Rsqrt__353" [id=726, type=Reciprocal]; -"727 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul" [id=727, type=Mul]; -"728 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_2" [id=728, type=Mul]; -"729 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/sub" [id=729, type=Sub]; -"730 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_1" [id=730, type=Mul]; -"731 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1" [id=731, type=Add]; -"732 QuantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=732, label="732 QuantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"733 DequantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=733, label="733 DequantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"734 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel^0_1" [id=734, label="734 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel:0_1", type=QuantizeLinear]; -"735 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel^0_1" [id=735, label="735 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel:0_1", type=DequantizeLinear]; -"736 bert/encoder/layer_3/intermediate/dense/MatMul" [id=736, type=MatMul]; -"737 bert/encoder/layer_3/intermediate/dense/BiasAdd" [id=737, type=Add]; -"738 bert/encoder/layer_3/intermediate/dense/Pow" [id=738, type=Pow]; -"739 bert/encoder/layer_3/intermediate/dense/mul" [id=739, type=Mul]; -"740 bert/encoder/layer_3/intermediate/dense/add" [id=740, type=Add]; -"741 bert/encoder/layer_3/intermediate/dense/mul_1" [id=741, type=Mul]; -"742 bert/encoder/layer_3/intermediate/dense/Tanh" [id=742, type=Tanh]; -"743 bert/encoder/layer_3/intermediate/dense/add_1" [id=743, type=Add]; -"744 bert/encoder/layer_3/intermediate/dense/mul_2" [id=744, type=Mul]; -"745 bert/encoder/layer_3/intermediate/dense/mul_3" [id=745, type=Mul]; -"746 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3^0_1" [id=746, label="746 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; -"747 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3^0_1" [id=747, label="747 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; -"748 QuantizeLinear_bert/encoder/layer_3/output/dense/kernel^0_1" [id=748, label="748 QuantizeLinear_bert/encoder/layer_3/output/dense/kernel:0_1", type=QuantizeLinear]; -"749 
DequantizeLinear_bert/encoder/layer_3/output/dense/kernel^0_1" [id=749, label="749 DequantizeLinear_bert/encoder/layer_3/output/dense/kernel:0_1", type=DequantizeLinear]; -"750 bert/encoder/layer_3/output/dense/MatMul" [id=750, type=MatMul]; -"751 bert/encoder/layer_3/output/dense/BiasAdd" [id=751, type=Add]; -"752 bert/encoder/layer_3/output/add" [id=752, type=Add]; -"753 bert/encoder/layer_3/output/LayerNorm/moments/mean" [id=753, type=ReduceMean]; -"754 bert/encoder/layer_3/output/LayerNorm/moments/StopGradient" [id=754, type=Identity]; -"755 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference" [id=755, type=Sub]; -"756 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference__355" [id=756, type=Mul]; -"757 bert/encoder/layer_3/output/LayerNorm/moments/variance" [id=757, type=ReduceMean]; -"758 bert/encoder/layer_3/output/LayerNorm/batchnorm/add" [id=758, type=Add]; -"759 bert/encoder/layer_3/output/LayerNorm/batchnorm/Rsqrt" [id=759, type=Sqrt]; -"760 bert/encoder/layer_3/output/LayerNorm/batchnorm/Rsqrt__357" [id=760, type=Reciprocal]; -"761 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul" [id=761, type=Mul]; -"762 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_2" [id=762, type=Mul]; -"763 bert/encoder/layer_3/output/LayerNorm/batchnorm/sub" [id=763, type=Sub]; -"764 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_1" [id=764, type=Mul]; -"765 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" [id=765, type=Add]; -"766 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_1" [id=766, label="766 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"767 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_1" [id=767, label="767 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"768 QuantizeLinear_bert/encoder/layer_4/attention/self/value/kernel^0_1" [id=768, label="768 QuantizeLinear_bert/encoder/layer_4/attention/self/value/kernel:0_1", type=QuantizeLinear]; -"769 DequantizeLinear_bert/encoder/layer_4/attention/self/value/kernel^0_1" [id=769, label="769 DequantizeLinear_bert/encoder/layer_4/attention/self/value/kernel:0_1", type=DequantizeLinear]; -"770 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_2" [id=770, label="770 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; -"771 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_2" [id=771, label="771 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; -"772 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_3" [id=772, label="772 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; -"773 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_3" [id=773, label="773 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; -"774 bert/encoder/layer_4/attention/self/value/MatMul" [id=774, type=MatMul]; -"775 bert/encoder/layer_4/attention/self/value/BiasAdd" [id=775, type=Add]; -"776 bert/encoder/layer_4/attention/self/Reshape_2" [id=776, type=Reshape]; -"777 bert/encoder/layer_4/attention/self/transpose_2" [id=777, type=Transpose]; -"778 QuantizeLinear_bert/encoder/layer_4/attention/self/query/kernel^0_1" [id=778, label="778 QuantizeLinear_bert/encoder/layer_4/attention/self/query/kernel:0_1", 
type=QuantizeLinear]; -"779 DequantizeLinear_bert/encoder/layer_4/attention/self/query/kernel^0_1" [id=779, label="779 DequantizeLinear_bert/encoder/layer_4/attention/self/query/kernel:0_1", type=DequantizeLinear]; -"780 bert/encoder/layer_4/attention/self/query/MatMul" [id=780, type=MatMul]; -"781 bert/encoder/layer_4/attention/self/query/BiasAdd" [id=781, type=Add]; -"782 QuantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd^0_1" [id=782, label="782 QuantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; -"783 DequantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd^0_1" [id=783, label="783 DequantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; -"784 bert/encoder/layer_4/attention/self/Reshape" [id=784, type=Reshape]; -"785 bert/encoder/layer_4/attention/self/transpose" [id=785, type=Transpose]; -"786 QuantizeLinear_bert/encoder/layer_4/attention/self/key/kernel^0_1" [id=786, label="786 QuantizeLinear_bert/encoder/layer_4/attention/self/key/kernel:0_1", type=QuantizeLinear]; -"787 DequantizeLinear_bert/encoder/layer_4/attention/self/key/kernel^0_1" [id=787, label="787 DequantizeLinear_bert/encoder/layer_4/attention/self/key/kernel:0_1", type=DequantizeLinear]; -"788 bert/encoder/layer_4/attention/self/key/MatMul" [id=788, type=MatMul]; -"789 bert/encoder/layer_4/attention/self/key/BiasAdd" [id=789, type=Add]; -"790 QuantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd^0_1" [id=790, label="790 QuantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; -"791 DequantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd^0_1" [id=791, label="791 DequantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; -"792 bert/encoder/layer_4/attention/self/Reshape_1" [id=792, type=Reshape]; -"793 bert/encoder/layer_4/attention/self/transpose_1" [id=793, type=Transpose]; -"794 bert/encoder/layer_4/attention/self/MatMul__362" [id=794, type=Transpose]; -"795 bert/encoder/layer_4/attention/self/MatMul" [id=795, type=MatMul]; -"796 bert/encoder/layer_4/attention/self/Mul" [id=796, type=Mul]; -"797 bert/encoder/layer_4/attention/self/add" [id=797, type=Add]; -"798 bert/encoder/layer_4/attention/self/Softmax" [id=798, type=Softmax]; -"799 bert/encoder/layer_4/attention/self/MatMul_1" [id=799, type=MatMul]; -"800 QuantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1^0_1" [id=800, label="800 QuantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1:0_1", type=QuantizeLinear]; -"801 DequantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1^0_1" [id=801, label="801 DequantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1:0_1", type=DequantizeLinear]; -"802 bert/encoder/layer_4/attention/self/transpose_3" [id=802, type=Transpose]; -"803 bert/encoder/layer_4/attention/self/Reshape_3" [id=803, type=Reshape]; -"804 QuantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel^0_1" [id=804, label="804 QuantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel:0_1", type=QuantizeLinear]; -"805 DequantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel^0_1" [id=805, label="805 DequantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel:0_1", type=DequantizeLinear]; -"806 bert/encoder/layer_4/attention/output/dense/MatMul" [id=806, type=MatMul]; -"807 bert/encoder/layer_4/attention/output/dense/BiasAdd" [id=807, type=Add]; -"808 bert/encoder/layer_4/attention/output/add" [id=808, type=Add]; -"809 
bert/encoder/layer_4/attention/output/LayerNorm/moments/mean" [id=809, type=ReduceMean]; -"810 bert/encoder/layer_4/attention/output/LayerNorm/moments/StopGradient" [id=810, type=Identity]; -"811 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference" [id=811, type=Sub]; -"812 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference__365" [id=812, type=Mul]; -"813 bert/encoder/layer_4/attention/output/LayerNorm/moments/variance" [id=813, type=ReduceMean]; -"814 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add" [id=814, type=Add]; -"815 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/Rsqrt" [id=815, type=Sqrt]; -"816 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/Rsqrt__367" [id=816, type=Reciprocal]; -"817 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul" [id=817, type=Mul]; -"818 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_2" [id=818, type=Mul]; -"819 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/sub" [id=819, type=Sub]; -"820 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_1" [id=820, type=Mul]; -"821 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1" [id=821, type=Add]; -"822 QuantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=822, label="822 QuantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"823 DequantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=823, label="823 DequantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"824 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel^0_1" [id=824, label="824 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel:0_1", type=QuantizeLinear]; -"825 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel^0_1" [id=825, label="825 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel:0_1", type=DequantizeLinear]; -"826 bert/encoder/layer_4/intermediate/dense/MatMul" [id=826, type=MatMul]; -"827 bert/encoder/layer_4/intermediate/dense/BiasAdd" [id=827, type=Add]; -"828 bert/encoder/layer_4/intermediate/dense/Pow" [id=828, type=Pow]; -"829 bert/encoder/layer_4/intermediate/dense/mul" [id=829, type=Mul]; -"830 bert/encoder/layer_4/intermediate/dense/add" [id=830, type=Add]; -"831 bert/encoder/layer_4/intermediate/dense/mul_1" [id=831, type=Mul]; -"832 bert/encoder/layer_4/intermediate/dense/Tanh" [id=832, type=Tanh]; -"833 bert/encoder/layer_4/intermediate/dense/add_1" [id=833, type=Add]; -"834 bert/encoder/layer_4/intermediate/dense/mul_2" [id=834, type=Mul]; -"835 bert/encoder/layer_4/intermediate/dense/mul_3" [id=835, type=Mul]; -"836 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3^0_1" [id=836, label="836 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; -"837 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3^0_1" [id=837, label="837 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; -"838 QuantizeLinear_bert/encoder/layer_4/output/dense/kernel^0_1" [id=838, label="838 QuantizeLinear_bert/encoder/layer_4/output/dense/kernel:0_1", type=QuantizeLinear]; -"839 DequantizeLinear_bert/encoder/layer_4/output/dense/kernel^0_1" [id=839, label="839 DequantizeLinear_bert/encoder/layer_4/output/dense/kernel:0_1", type=DequantizeLinear]; -"840 bert/encoder/layer_4/output/dense/MatMul" [id=840, 
type=MatMul]; -"841 bert/encoder/layer_4/output/dense/BiasAdd" [id=841, type=Add]; -"842 bert/encoder/layer_4/output/add" [id=842, type=Add]; -"843 bert/encoder/layer_4/output/LayerNorm/moments/mean" [id=843, type=ReduceMean]; -"844 bert/encoder/layer_4/output/LayerNorm/moments/StopGradient" [id=844, type=Identity]; -"845 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference" [id=845, type=Sub]; -"846 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference__369" [id=846, type=Mul]; -"847 bert/encoder/layer_4/output/LayerNorm/moments/variance" [id=847, type=ReduceMean]; -"848 bert/encoder/layer_4/output/LayerNorm/batchnorm/add" [id=848, type=Add]; -"849 bert/encoder/layer_4/output/LayerNorm/batchnorm/Rsqrt" [id=849, type=Sqrt]; -"850 bert/encoder/layer_4/output/LayerNorm/batchnorm/Rsqrt__371" [id=850, type=Reciprocal]; -"851 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul" [id=851, type=Mul]; -"852 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_2" [id=852, type=Mul]; -"853 bert/encoder/layer_4/output/LayerNorm/batchnorm/sub" [id=853, type=Sub]; -"854 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_1" [id=854, type=Mul]; -"855 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" [id=855, type=Add]; -"856 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_1" [id=856, label="856 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"857 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_1" [id=857, label="857 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"858 QuantizeLinear_bert/encoder/layer_5/attention/self/value/kernel^0_1" [id=858, label="858 QuantizeLinear_bert/encoder/layer_5/attention/self/value/kernel:0_1", type=QuantizeLinear]; -"859 DequantizeLinear_bert/encoder/layer_5/attention/self/value/kernel^0_1" [id=859, label="859 DequantizeLinear_bert/encoder/layer_5/attention/self/value/kernel:0_1", type=DequantizeLinear]; -"860 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_2" [id=860, label="860 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; -"861 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_2" [id=861, label="861 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; -"862 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_3" [id=862, label="862 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; -"863 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_3" [id=863, label="863 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; -"864 bert/encoder/layer_5/attention/self/value/MatMul" [id=864, type=MatMul]; -"865 bert/encoder/layer_5/attention/self/value/BiasAdd" [id=865, type=Add]; -"866 bert/encoder/layer_5/attention/self/Reshape_2" [id=866, type=Reshape]; -"867 bert/encoder/layer_5/attention/self/transpose_2" [id=867, type=Transpose]; -"868 QuantizeLinear_bert/encoder/layer_5/attention/self/query/kernel^0_1" [id=868, label="868 QuantizeLinear_bert/encoder/layer_5/attention/self/query/kernel:0_1", type=QuantizeLinear]; -"869 DequantizeLinear_bert/encoder/layer_5/attention/self/query/kernel^0_1" [id=869, label="869 DequantizeLinear_bert/encoder/layer_5/attention/self/query/kernel:0_1", type=DequantizeLinear]; -"870 
bert/encoder/layer_5/attention/self/query/MatMul" [id=870, type=MatMul]; -"871 bert/encoder/layer_5/attention/self/query/BiasAdd" [id=871, type=Add]; -"872 QuantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd^0_1" [id=872, label="872 QuantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; -"873 DequantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd^0_1" [id=873, label="873 DequantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; -"874 bert/encoder/layer_5/attention/self/Reshape" [id=874, type=Reshape]; -"875 bert/encoder/layer_5/attention/self/transpose" [id=875, type=Transpose]; -"876 QuantizeLinear_bert/encoder/layer_5/attention/self/key/kernel^0_1" [id=876, label="876 QuantizeLinear_bert/encoder/layer_5/attention/self/key/kernel:0_1", type=QuantizeLinear]; -"877 DequantizeLinear_bert/encoder/layer_5/attention/self/key/kernel^0_1" [id=877, label="877 DequantizeLinear_bert/encoder/layer_5/attention/self/key/kernel:0_1", type=DequantizeLinear]; -"878 bert/encoder/layer_5/attention/self/key/MatMul" [id=878, type=MatMul]; -"879 bert/encoder/layer_5/attention/self/key/BiasAdd" [id=879, type=Add]; -"880 QuantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd^0_1" [id=880, label="880 QuantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; -"881 DequantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd^0_1" [id=881, label="881 DequantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; -"882 bert/encoder/layer_5/attention/self/Reshape_1" [id=882, type=Reshape]; -"883 bert/encoder/layer_5/attention/self/transpose_1" [id=883, type=Transpose]; -"884 bert/encoder/layer_5/attention/self/MatMul__376" [id=884, type=Transpose]; -"885 bert/encoder/layer_5/attention/self/MatMul" [id=885, type=MatMul]; -"886 bert/encoder/layer_5/attention/self/Mul" [id=886, type=Mul]; -"887 bert/encoder/layer_5/attention/self/add" [id=887, type=Add]; -"888 bert/encoder/layer_5/attention/self/Softmax" [id=888, type=Softmax]; -"889 bert/encoder/layer_5/attention/self/MatMul_1" [id=889, type=MatMul]; -"890 QuantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1^0_1" [id=890, label="890 QuantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1:0_1", type=QuantizeLinear]; -"891 DequantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1^0_1" [id=891, label="891 DequantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1:0_1", type=DequantizeLinear]; -"892 bert/encoder/layer_5/attention/self/transpose_3" [id=892, type=Transpose]; -"893 bert/encoder/layer_5/attention/self/Reshape_3" [id=893, type=Reshape]; -"894 QuantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel^0_1" [id=894, label="894 QuantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel:0_1", type=QuantizeLinear]; -"895 DequantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel^0_1" [id=895, label="895 DequantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel:0_1", type=DequantizeLinear]; -"896 bert/encoder/layer_5/attention/output/dense/MatMul" [id=896, type=MatMul]; -"897 bert/encoder/layer_5/attention/output/dense/BiasAdd" [id=897, type=Add]; -"898 bert/encoder/layer_5/attention/output/add" [id=898, type=Add]; -"899 bert/encoder/layer_5/attention/output/LayerNorm/moments/mean" [id=899, type=ReduceMean]; -"900 bert/encoder/layer_5/attention/output/LayerNorm/moments/StopGradient" [id=900, type=Identity]; -"901 
bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference" [id=901, type=Sub]; -"902 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference__379" [id=902, type=Mul]; -"903 bert/encoder/layer_5/attention/output/LayerNorm/moments/variance" [id=903, type=ReduceMean]; -"904 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add" [id=904, type=Add]; -"905 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/Rsqrt" [id=905, type=Sqrt]; -"906 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/Rsqrt__381" [id=906, type=Reciprocal]; -"907 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul" [id=907, type=Mul]; -"908 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_2" [id=908, type=Mul]; -"909 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/sub" [id=909, type=Sub]; -"910 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_1" [id=910, type=Mul]; -"911 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1" [id=911, type=Add]; -"912 QuantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=912, label="912 QuantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"913 DequantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=913, label="913 DequantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"914 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel^0_1" [id=914, label="914 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel:0_1", type=QuantizeLinear]; -"915 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel^0_1" [id=915, label="915 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel:0_1", type=DequantizeLinear]; -"916 bert/encoder/layer_5/intermediate/dense/MatMul" [id=916, type=MatMul]; -"917 bert/encoder/layer_5/intermediate/dense/BiasAdd" [id=917, type=Add]; -"918 bert/encoder/layer_5/intermediate/dense/Pow" [id=918, type=Pow]; -"919 bert/encoder/layer_5/intermediate/dense/mul" [id=919, type=Mul]; -"920 bert/encoder/layer_5/intermediate/dense/add" [id=920, type=Add]; -"921 bert/encoder/layer_5/intermediate/dense/mul_1" [id=921, type=Mul]; -"922 bert/encoder/layer_5/intermediate/dense/Tanh" [id=922, type=Tanh]; -"923 bert/encoder/layer_5/intermediate/dense/add_1" [id=923, type=Add]; -"924 bert/encoder/layer_5/intermediate/dense/mul_2" [id=924, type=Mul]; -"925 bert/encoder/layer_5/intermediate/dense/mul_3" [id=925, type=Mul]; -"926 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3^0_1" [id=926, label="926 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; -"927 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3^0_1" [id=927, label="927 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; -"928 QuantizeLinear_bert/encoder/layer_5/output/dense/kernel^0_1" [id=928, label="928 QuantizeLinear_bert/encoder/layer_5/output/dense/kernel:0_1", type=QuantizeLinear]; -"929 DequantizeLinear_bert/encoder/layer_5/output/dense/kernel^0_1" [id=929, label="929 DequantizeLinear_bert/encoder/layer_5/output/dense/kernel:0_1", type=DequantizeLinear]; -"930 bert/encoder/layer_5/output/dense/MatMul" [id=930, type=MatMul]; -"931 bert/encoder/layer_5/output/dense/BiasAdd" [id=931, type=Add]; -"932 bert/encoder/layer_5/output/add" [id=932, type=Add]; -"933 bert/encoder/layer_5/output/LayerNorm/moments/mean" 
[id=933, type=ReduceMean]; -"934 bert/encoder/layer_5/output/LayerNorm/moments/StopGradient" [id=934, type=Identity]; -"935 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference" [id=935, type=Sub]; -"936 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference__383" [id=936, type=Mul]; -"937 bert/encoder/layer_5/output/LayerNorm/moments/variance" [id=937, type=ReduceMean]; -"938 bert/encoder/layer_5/output/LayerNorm/batchnorm/add" [id=938, type=Add]; -"939 bert/encoder/layer_5/output/LayerNorm/batchnorm/Rsqrt" [id=939, type=Sqrt]; -"940 bert/encoder/layer_5/output/LayerNorm/batchnorm/Rsqrt__385" [id=940, type=Reciprocal]; -"941 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul" [id=941, type=Mul]; -"942 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_2" [id=942, type=Mul]; -"943 bert/encoder/layer_5/output/LayerNorm/batchnorm/sub" [id=943, type=Sub]; -"944 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_1" [id=944, type=Mul]; -"945 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" [id=945, type=Add]; -"946 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_1" [id=946, label="946 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"947 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_1" [id=947, label="947 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"948 QuantizeLinear_bert/encoder/layer_6/attention/self/value/kernel^0_1" [id=948, label="948 QuantizeLinear_bert/encoder/layer_6/attention/self/value/kernel:0_1", type=QuantizeLinear]; -"949 DequantizeLinear_bert/encoder/layer_6/attention/self/value/kernel^0_1" [id=949, label="949 DequantizeLinear_bert/encoder/layer_6/attention/self/value/kernel:0_1", type=DequantizeLinear]; -"950 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_2" [id=950, label="950 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; -"951 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_2" [id=951, label="951 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; -"952 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_3" [id=952, label="952 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; -"953 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_3" [id=953, label="953 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; -"954 bert/encoder/layer_6/attention/self/value/MatMul" [id=954, type=MatMul]; -"955 bert/encoder/layer_6/attention/self/value/BiasAdd" [id=955, type=Add]; -"956 bert/encoder/layer_6/attention/self/Reshape_2" [id=956, type=Reshape]; -"957 bert/encoder/layer_6/attention/self/transpose_2" [id=957, type=Transpose]; -"958 QuantizeLinear_bert/encoder/layer_6/attention/self/query/kernel^0_1" [id=958, label="958 QuantizeLinear_bert/encoder/layer_6/attention/self/query/kernel:0_1", type=QuantizeLinear]; -"959 DequantizeLinear_bert/encoder/layer_6/attention/self/query/kernel^0_1" [id=959, label="959 DequantizeLinear_bert/encoder/layer_6/attention/self/query/kernel:0_1", type=DequantizeLinear]; -"960 bert/encoder/layer_6/attention/self/query/MatMul" [id=960, type=MatMul]; -"961 bert/encoder/layer_6/attention/self/query/BiasAdd" [id=961, type=Add]; -"962 QuantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd^0_1" 
[id=962, label="962 QuantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; -"963 DequantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd^0_1" [id=963, label="963 DequantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; -"964 bert/encoder/layer_6/attention/self/Reshape" [id=964, type=Reshape]; -"965 bert/encoder/layer_6/attention/self/transpose" [id=965, type=Transpose]; -"966 QuantizeLinear_bert/encoder/layer_6/attention/self/key/kernel^0_1" [id=966, label="966 QuantizeLinear_bert/encoder/layer_6/attention/self/key/kernel:0_1", type=QuantizeLinear]; -"967 DequantizeLinear_bert/encoder/layer_6/attention/self/key/kernel^0_1" [id=967, label="967 DequantizeLinear_bert/encoder/layer_6/attention/self/key/kernel:0_1", type=DequantizeLinear]; -"968 bert/encoder/layer_6/attention/self/key/MatMul" [id=968, type=MatMul]; -"969 bert/encoder/layer_6/attention/self/key/BiasAdd" [id=969, type=Add]; -"970 QuantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd^0_1" [id=970, label="970 QuantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; -"971 DequantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd^0_1" [id=971, label="971 DequantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; -"972 bert/encoder/layer_6/attention/self/Reshape_1" [id=972, type=Reshape]; -"973 bert/encoder/layer_6/attention/self/transpose_1" [id=973, type=Transpose]; -"974 bert/encoder/layer_6/attention/self/MatMul__390" [id=974, type=Transpose]; -"975 bert/encoder/layer_6/attention/self/MatMul" [id=975, type=MatMul]; -"976 bert/encoder/layer_6/attention/self/Mul" [id=976, type=Mul]; -"977 bert/encoder/layer_6/attention/self/add" [id=977, type=Add]; -"978 bert/encoder/layer_6/attention/self/Softmax" [id=978, type=Softmax]; -"979 bert/encoder/layer_6/attention/self/MatMul_1" [id=979, type=MatMul]; -"980 QuantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1^0_1" [id=980, label="980 QuantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1:0_1", type=QuantizeLinear]; -"981 DequantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1^0_1" [id=981, label="981 DequantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1:0_1", type=DequantizeLinear]; -"982 bert/encoder/layer_6/attention/self/transpose_3" [id=982, type=Transpose]; -"983 bert/encoder/layer_6/attention/self/Reshape_3" [id=983, type=Reshape]; -"984 QuantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel^0_1" [id=984, label="984 QuantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel:0_1", type=QuantizeLinear]; -"985 DequantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel^0_1" [id=985, label="985 DequantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel:0_1", type=DequantizeLinear]; -"986 bert/encoder/layer_6/attention/output/dense/MatMul" [id=986, type=MatMul]; -"987 bert/encoder/layer_6/attention/output/dense/BiasAdd" [id=987, type=Add]; -"988 bert/encoder/layer_6/attention/output/add" [id=988, type=Add]; -"989 bert/encoder/layer_6/attention/output/LayerNorm/moments/mean" [id=989, type=ReduceMean]; -"990 bert/encoder/layer_6/attention/output/LayerNorm/moments/StopGradient" [id=990, type=Identity]; -"991 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference" [id=991, type=Sub]; -"992 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference__393" [id=992, type=Mul]; -"993 
bert/encoder/layer_6/attention/output/LayerNorm/moments/variance" [id=993, type=ReduceMean]; -"994 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add" [id=994, type=Add]; -"995 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/Rsqrt" [id=995, type=Sqrt]; -"996 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/Rsqrt__395" [id=996, type=Reciprocal]; -"997 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul" [id=997, type=Mul]; -"998 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_2" [id=998, type=Mul]; -"999 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/sub" [id=999, type=Sub]; -"1000 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_1" [id=1000, type=Mul]; -"1001 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1" [id=1001, type=Add]; -"1002 QuantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1002, label="1002 QuantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"1003 DequantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1003, label="1003 DequantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"1004 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel^0_1" [id=1004, label="1004 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel:0_1", type=QuantizeLinear]; -"1005 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel^0_1" [id=1005, label="1005 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel:0_1", type=DequantizeLinear]; -"1006 bert/encoder/layer_6/intermediate/dense/MatMul" [id=1006, type=MatMul]; -"1007 bert/encoder/layer_6/intermediate/dense/BiasAdd" [id=1007, type=Add]; -"1008 bert/encoder/layer_6/intermediate/dense/Pow" [id=1008, type=Pow]; -"1009 bert/encoder/layer_6/intermediate/dense/mul" [id=1009, type=Mul]; -"1010 bert/encoder/layer_6/intermediate/dense/add" [id=1010, type=Add]; -"1011 bert/encoder/layer_6/intermediate/dense/mul_1" [id=1011, type=Mul]; -"1012 bert/encoder/layer_6/intermediate/dense/Tanh" [id=1012, type=Tanh]; -"1013 bert/encoder/layer_6/intermediate/dense/add_1" [id=1013, type=Add]; -"1014 bert/encoder/layer_6/intermediate/dense/mul_2" [id=1014, type=Mul]; -"1015 bert/encoder/layer_6/intermediate/dense/mul_3" [id=1015, type=Mul]; -"1016 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3^0_1" [id=1016, label="1016 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; -"1017 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3^0_1" [id=1017, label="1017 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; -"1018 QuantizeLinear_bert/encoder/layer_6/output/dense/kernel^0_1" [id=1018, label="1018 QuantizeLinear_bert/encoder/layer_6/output/dense/kernel:0_1", type=QuantizeLinear]; -"1019 DequantizeLinear_bert/encoder/layer_6/output/dense/kernel^0_1" [id=1019, label="1019 DequantizeLinear_bert/encoder/layer_6/output/dense/kernel:0_1", type=DequantizeLinear]; -"1020 bert/encoder/layer_6/output/dense/MatMul" [id=1020, type=MatMul]; -"1021 bert/encoder/layer_6/output/dense/BiasAdd" [id=1021, type=Add]; -"1022 bert/encoder/layer_6/output/add" [id=1022, type=Add]; -"1023 bert/encoder/layer_6/output/LayerNorm/moments/mean" [id=1023, type=ReduceMean]; -"1024 bert/encoder/layer_6/output/LayerNorm/moments/StopGradient" [id=1024, type=Identity]; -"1025 
bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference" [id=1025, type=Sub]; -"1026 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference__397" [id=1026, type=Mul]; -"1027 bert/encoder/layer_6/output/LayerNorm/moments/variance" [id=1027, type=ReduceMean]; -"1028 bert/encoder/layer_6/output/LayerNorm/batchnorm/add" [id=1028, type=Add]; -"1029 bert/encoder/layer_6/output/LayerNorm/batchnorm/Rsqrt" [id=1029, type=Sqrt]; -"1030 bert/encoder/layer_6/output/LayerNorm/batchnorm/Rsqrt__399" [id=1030, type=Reciprocal]; -"1031 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul" [id=1031, type=Mul]; -"1032 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_2" [id=1032, type=Mul]; -"1033 bert/encoder/layer_6/output/LayerNorm/batchnorm/sub" [id=1033, type=Sub]; -"1034 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_1" [id=1034, type=Mul]; -"1035 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" [id=1035, type=Add]; -"1036 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_1" [id=1036, label="1036 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"1037 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_1" [id=1037, label="1037 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"1038 QuantizeLinear_bert/encoder/layer_7/attention/self/value/kernel^0_1" [id=1038, label="1038 QuantizeLinear_bert/encoder/layer_7/attention/self/value/kernel:0_1", type=QuantizeLinear]; -"1039 DequantizeLinear_bert/encoder/layer_7/attention/self/value/kernel^0_1" [id=1039, label="1039 DequantizeLinear_bert/encoder/layer_7/attention/self/value/kernel:0_1", type=DequantizeLinear]; -"1040 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_2" [id=1040, label="1040 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; -"1041 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_2" [id=1041, label="1041 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; -"1042 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_3" [id=1042, label="1042 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; -"1043 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_3" [id=1043, label="1043 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; -"1044 bert/encoder/layer_7/attention/self/value/MatMul" [id=1044, type=MatMul]; -"1045 bert/encoder/layer_7/attention/self/value/BiasAdd" [id=1045, type=Add]; -"1046 bert/encoder/layer_7/attention/self/Reshape_2" [id=1046, type=Reshape]; -"1047 bert/encoder/layer_7/attention/self/transpose_2" [id=1047, type=Transpose]; -"1048 QuantizeLinear_bert/encoder/layer_7/attention/self/query/kernel^0_1" [id=1048, label="1048 QuantizeLinear_bert/encoder/layer_7/attention/self/query/kernel:0_1", type=QuantizeLinear]; -"1049 DequantizeLinear_bert/encoder/layer_7/attention/self/query/kernel^0_1" [id=1049, label="1049 DequantizeLinear_bert/encoder/layer_7/attention/self/query/kernel:0_1", type=DequantizeLinear]; -"1050 bert/encoder/layer_7/attention/self/query/MatMul" [id=1050, type=MatMul]; -"1051 bert/encoder/layer_7/attention/self/query/BiasAdd" [id=1051, type=Add]; -"1052 QuantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd^0_1" [id=1052, label="1052 
QuantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; -"1053 DequantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd^0_1" [id=1053, label="1053 DequantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; -"1054 bert/encoder/layer_7/attention/self/Reshape" [id=1054, type=Reshape]; -"1055 bert/encoder/layer_7/attention/self/transpose" [id=1055, type=Transpose]; -"1056 QuantizeLinear_bert/encoder/layer_7/attention/self/key/kernel^0_1" [id=1056, label="1056 QuantizeLinear_bert/encoder/layer_7/attention/self/key/kernel:0_1", type=QuantizeLinear]; -"1057 DequantizeLinear_bert/encoder/layer_7/attention/self/key/kernel^0_1" [id=1057, label="1057 DequantizeLinear_bert/encoder/layer_7/attention/self/key/kernel:0_1", type=DequantizeLinear]; -"1058 bert/encoder/layer_7/attention/self/key/MatMul" [id=1058, type=MatMul]; -"1059 bert/encoder/layer_7/attention/self/key/BiasAdd" [id=1059, type=Add]; -"1060 QuantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd^0_1" [id=1060, label="1060 QuantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; -"1061 DequantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd^0_1" [id=1061, label="1061 DequantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; -"1062 bert/encoder/layer_7/attention/self/Reshape_1" [id=1062, type=Reshape]; -"1063 bert/encoder/layer_7/attention/self/transpose_1" [id=1063, type=Transpose]; -"1064 bert/encoder/layer_7/attention/self/MatMul__404" [id=1064, type=Transpose]; -"1065 bert/encoder/layer_7/attention/self/MatMul" [id=1065, type=MatMul]; -"1066 bert/encoder/layer_7/attention/self/Mul" [id=1066, type=Mul]; -"1067 bert/encoder/layer_7/attention/self/add" [id=1067, type=Add]; -"1068 bert/encoder/layer_7/attention/self/Softmax" [id=1068, type=Softmax]; -"1069 bert/encoder/layer_7/attention/self/MatMul_1" [id=1069, type=MatMul]; -"1070 QuantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1^0_1" [id=1070, label="1070 QuantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1:0_1", type=QuantizeLinear]; -"1071 DequantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1^0_1" [id=1071, label="1071 DequantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1:0_1", type=DequantizeLinear]; -"1072 bert/encoder/layer_7/attention/self/transpose_3" [id=1072, type=Transpose]; -"1073 bert/encoder/layer_7/attention/self/Reshape_3" [id=1073, type=Reshape]; -"1074 QuantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel^0_1" [id=1074, label="1074 QuantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel:0_1", type=QuantizeLinear]; -"1075 DequantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel^0_1" [id=1075, label="1075 DequantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel:0_1", type=DequantizeLinear]; -"1076 bert/encoder/layer_7/attention/output/dense/MatMul" [id=1076, type=MatMul]; -"1077 bert/encoder/layer_7/attention/output/dense/BiasAdd" [id=1077, type=Add]; -"1078 bert/encoder/layer_7/attention/output/add" [id=1078, type=Add]; -"1079 bert/encoder/layer_7/attention/output/LayerNorm/moments/mean" [id=1079, type=ReduceMean]; -"1080 bert/encoder/layer_7/attention/output/LayerNorm/moments/StopGradient" [id=1080, type=Identity]; -"1081 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference" [id=1081, type=Sub]; -"1082 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference__407" [id=1082, type=Mul]; 
-"1083 bert/encoder/layer_7/attention/output/LayerNorm/moments/variance" [id=1083, type=ReduceMean]; -"1084 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add" [id=1084, type=Add]; -"1085 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/Rsqrt" [id=1085, type=Sqrt]; -"1086 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/Rsqrt__409" [id=1086, type=Reciprocal]; -"1087 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul" [id=1087, type=Mul]; -"1088 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_2" [id=1088, type=Mul]; -"1089 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/sub" [id=1089, type=Sub]; -"1090 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_1" [id=1090, type=Mul]; -"1091 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1" [id=1091, type=Add]; -"1092 QuantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1092, label="1092 QuantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"1093 DequantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1093, label="1093 DequantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"1094 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel^0_1" [id=1094, label="1094 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel:0_1", type=QuantizeLinear]; -"1095 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel^0_1" [id=1095, label="1095 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel:0_1", type=DequantizeLinear]; -"1096 bert/encoder/layer_7/intermediate/dense/MatMul" [id=1096, type=MatMul]; -"1097 bert/encoder/layer_7/intermediate/dense/BiasAdd" [id=1097, type=Add]; -"1098 bert/encoder/layer_7/intermediate/dense/Pow" [id=1098, type=Pow]; -"1099 bert/encoder/layer_7/intermediate/dense/mul" [id=1099, type=Mul]; -"1100 bert/encoder/layer_7/intermediate/dense/add" [id=1100, type=Add]; -"1101 bert/encoder/layer_7/intermediate/dense/mul_1" [id=1101, type=Mul]; -"1102 bert/encoder/layer_7/intermediate/dense/Tanh" [id=1102, type=Tanh]; -"1103 bert/encoder/layer_7/intermediate/dense/add_1" [id=1103, type=Add]; -"1104 bert/encoder/layer_7/intermediate/dense/mul_2" [id=1104, type=Mul]; -"1105 bert/encoder/layer_7/intermediate/dense/mul_3" [id=1105, type=Mul]; -"1106 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3^0_1" [id=1106, label="1106 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; -"1107 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3^0_1" [id=1107, label="1107 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; -"1108 QuantizeLinear_bert/encoder/layer_7/output/dense/kernel^0_1" [id=1108, label="1108 QuantizeLinear_bert/encoder/layer_7/output/dense/kernel:0_1", type=QuantizeLinear]; -"1109 DequantizeLinear_bert/encoder/layer_7/output/dense/kernel^0_1" [id=1109, label="1109 DequantizeLinear_bert/encoder/layer_7/output/dense/kernel:0_1", type=DequantizeLinear]; -"1110 bert/encoder/layer_7/output/dense/MatMul" [id=1110, type=MatMul]; -"1111 bert/encoder/layer_7/output/dense/BiasAdd" [id=1111, type=Add]; -"1112 bert/encoder/layer_7/output/add" [id=1112, type=Add]; -"1113 bert/encoder/layer_7/output/LayerNorm/moments/mean" [id=1113, type=ReduceMean]; -"1114 bert/encoder/layer_7/output/LayerNorm/moments/StopGradient" [id=1114, type=Identity]; -"1115 
bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference" [id=1115, type=Sub]; -"1116 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference__411" [id=1116, type=Mul]; -"1117 bert/encoder/layer_7/output/LayerNorm/moments/variance" [id=1117, type=ReduceMean]; -"1118 bert/encoder/layer_7/output/LayerNorm/batchnorm/add" [id=1118, type=Add]; -"1119 bert/encoder/layer_7/output/LayerNorm/batchnorm/Rsqrt" [id=1119, type=Sqrt]; -"1120 bert/encoder/layer_7/output/LayerNorm/batchnorm/Rsqrt__413" [id=1120, type=Reciprocal]; -"1121 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul" [id=1121, type=Mul]; -"1122 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_2" [id=1122, type=Mul]; -"1123 bert/encoder/layer_7/output/LayerNorm/batchnorm/sub" [id=1123, type=Sub]; -"1124 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_1" [id=1124, type=Mul]; -"1125 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" [id=1125, type=Add]; -"1126 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_1" [id=1126, label="1126 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"1127 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_1" [id=1127, label="1127 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"1128 QuantizeLinear_bert/encoder/layer_8/attention/self/value/kernel^0_1" [id=1128, label="1128 QuantizeLinear_bert/encoder/layer_8/attention/self/value/kernel:0_1", type=QuantizeLinear]; -"1129 DequantizeLinear_bert/encoder/layer_8/attention/self/value/kernel^0_1" [id=1129, label="1129 DequantizeLinear_bert/encoder/layer_8/attention/self/value/kernel:0_1", type=DequantizeLinear]; -"1130 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_2" [id=1130, label="1130 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; -"1131 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_2" [id=1131, label="1131 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; -"1132 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_3" [id=1132, label="1132 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; -"1133 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_3" [id=1133, label="1133 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; -"1134 bert/encoder/layer_8/attention/self/value/MatMul" [id=1134, type=MatMul]; -"1135 bert/encoder/layer_8/attention/self/value/BiasAdd" [id=1135, type=Add]; -"1136 bert/encoder/layer_8/attention/self/Reshape_2" [id=1136, type=Reshape]; -"1137 bert/encoder/layer_8/attention/self/transpose_2" [id=1137, type=Transpose]; -"1138 QuantizeLinear_bert/encoder/layer_8/attention/self/query/kernel^0_1" [id=1138, label="1138 QuantizeLinear_bert/encoder/layer_8/attention/self/query/kernel:0_1", type=QuantizeLinear]; -"1139 DequantizeLinear_bert/encoder/layer_8/attention/self/query/kernel^0_1" [id=1139, label="1139 DequantizeLinear_bert/encoder/layer_8/attention/self/query/kernel:0_1", type=DequantizeLinear]; -"1140 bert/encoder/layer_8/attention/self/query/MatMul" [id=1140, type=MatMul]; -"1141 bert/encoder/layer_8/attention/self/query/BiasAdd" [id=1141, type=Add]; -"1142 QuantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd^0_1" [id=1142, label="1142 
QuantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; -"1143 DequantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd^0_1" [id=1143, label="1143 DequantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; -"1144 bert/encoder/layer_8/attention/self/Reshape" [id=1144, type=Reshape]; -"1145 bert/encoder/layer_8/attention/self/transpose" [id=1145, type=Transpose]; -"1146 QuantizeLinear_bert/encoder/layer_8/attention/self/key/kernel^0_1" [id=1146, label="1146 QuantizeLinear_bert/encoder/layer_8/attention/self/key/kernel:0_1", type=QuantizeLinear]; -"1147 DequantizeLinear_bert/encoder/layer_8/attention/self/key/kernel^0_1" [id=1147, label="1147 DequantizeLinear_bert/encoder/layer_8/attention/self/key/kernel:0_1", type=DequantizeLinear]; -"1148 bert/encoder/layer_8/attention/self/key/MatMul" [id=1148, type=MatMul]; -"1149 bert/encoder/layer_8/attention/self/key/BiasAdd" [id=1149, type=Add]; -"1150 QuantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd^0_1" [id=1150, label="1150 QuantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; -"1151 DequantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd^0_1" [id=1151, label="1151 DequantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; -"1152 bert/encoder/layer_8/attention/self/Reshape_1" [id=1152, type=Reshape]; -"1153 bert/encoder/layer_8/attention/self/transpose_1" [id=1153, type=Transpose]; -"1154 bert/encoder/layer_8/attention/self/MatMul__418" [id=1154, type=Transpose]; -"1155 bert/encoder/layer_8/attention/self/MatMul" [id=1155, type=MatMul]; -"1156 bert/encoder/layer_8/attention/self/Mul" [id=1156, type=Mul]; -"1157 bert/encoder/layer_8/attention/self/add" [id=1157, type=Add]; -"1158 bert/encoder/layer_8/attention/self/Softmax" [id=1158, type=Softmax]; -"1159 bert/encoder/layer_8/attention/self/MatMul_1" [id=1159, type=MatMul]; -"1160 QuantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1^0_1" [id=1160, label="1160 QuantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1:0_1", type=QuantizeLinear]; -"1161 DequantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1^0_1" [id=1161, label="1161 DequantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1:0_1", type=DequantizeLinear]; -"1162 bert/encoder/layer_8/attention/self/transpose_3" [id=1162, type=Transpose]; -"1163 bert/encoder/layer_8/attention/self/Reshape_3" [id=1163, type=Reshape]; -"1164 QuantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel^0_1" [id=1164, label="1164 QuantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel:0_1", type=QuantizeLinear]; -"1165 DequantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel^0_1" [id=1165, label="1165 DequantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel:0_1", type=DequantizeLinear]; -"1166 bert/encoder/layer_8/attention/output/dense/MatMul" [id=1166, type=MatMul]; -"1167 bert/encoder/layer_8/attention/output/dense/BiasAdd" [id=1167, type=Add]; -"1168 bert/encoder/layer_8/attention/output/add" [id=1168, type=Add]; -"1169 bert/encoder/layer_8/attention/output/LayerNorm/moments/mean" [id=1169, type=ReduceMean]; -"1170 bert/encoder/layer_8/attention/output/LayerNorm/moments/StopGradient" [id=1170, type=Identity]; -"1171 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference" [id=1171, type=Sub]; -"1172 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference__421" [id=1172, type=Mul]; 
-"1173 bert/encoder/layer_8/attention/output/LayerNorm/moments/variance" [id=1173, type=ReduceMean]; -"1174 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add" [id=1174, type=Add]; -"1175 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/Rsqrt" [id=1175, type=Sqrt]; -"1176 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/Rsqrt__423" [id=1176, type=Reciprocal]; -"1177 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul" [id=1177, type=Mul]; -"1178 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_2" [id=1178, type=Mul]; -"1179 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/sub" [id=1179, type=Sub]; -"1180 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_1" [id=1180, type=Mul]; -"1181 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1" [id=1181, type=Add]; -"1182 QuantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1182, label="1182 QuantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"1183 DequantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1183, label="1183 DequantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"1184 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel^0_1" [id=1184, label="1184 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel:0_1", type=QuantizeLinear]; -"1185 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel^0_1" [id=1185, label="1185 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel:0_1", type=DequantizeLinear]; -"1186 bert/encoder/layer_8/intermediate/dense/MatMul" [id=1186, type=MatMul]; -"1187 bert/encoder/layer_8/intermediate/dense/BiasAdd" [id=1187, type=Add]; -"1188 bert/encoder/layer_8/intermediate/dense/Pow" [id=1188, type=Pow]; -"1189 bert/encoder/layer_8/intermediate/dense/mul" [id=1189, type=Mul]; -"1190 bert/encoder/layer_8/intermediate/dense/add" [id=1190, type=Add]; -"1191 bert/encoder/layer_8/intermediate/dense/mul_1" [id=1191, type=Mul]; -"1192 bert/encoder/layer_8/intermediate/dense/Tanh" [id=1192, type=Tanh]; -"1193 bert/encoder/layer_8/intermediate/dense/add_1" [id=1193, type=Add]; -"1194 bert/encoder/layer_8/intermediate/dense/mul_2" [id=1194, type=Mul]; -"1195 bert/encoder/layer_8/intermediate/dense/mul_3" [id=1195, type=Mul]; -"1196 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3^0_1" [id=1196, label="1196 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; -"1197 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3^0_1" [id=1197, label="1197 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; -"1198 QuantizeLinear_bert/encoder/layer_8/output/dense/kernel^0_1" [id=1198, label="1198 QuantizeLinear_bert/encoder/layer_8/output/dense/kernel:0_1", type=QuantizeLinear]; -"1199 DequantizeLinear_bert/encoder/layer_8/output/dense/kernel^0_1" [id=1199, label="1199 DequantizeLinear_bert/encoder/layer_8/output/dense/kernel:0_1", type=DequantizeLinear]; -"1200 bert/encoder/layer_8/output/dense/MatMul" [id=1200, type=MatMul]; -"1201 bert/encoder/layer_8/output/dense/BiasAdd" [id=1201, type=Add]; -"1202 bert/encoder/layer_8/output/add" [id=1202, type=Add]; -"1203 bert/encoder/layer_8/output/LayerNorm/moments/mean" [id=1203, type=ReduceMean]; -"1204 bert/encoder/layer_8/output/LayerNorm/moments/StopGradient" [id=1204, type=Identity]; -"1205 
bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference" [id=1205, type=Sub]; -"1206 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference__425" [id=1206, type=Mul]; -"1207 bert/encoder/layer_8/output/LayerNorm/moments/variance" [id=1207, type=ReduceMean]; -"1208 bert/encoder/layer_8/output/LayerNorm/batchnorm/add" [id=1208, type=Add]; -"1209 bert/encoder/layer_8/output/LayerNorm/batchnorm/Rsqrt" [id=1209, type=Sqrt]; -"1210 bert/encoder/layer_8/output/LayerNorm/batchnorm/Rsqrt__427" [id=1210, type=Reciprocal]; -"1211 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul" [id=1211, type=Mul]; -"1212 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_2" [id=1212, type=Mul]; -"1213 bert/encoder/layer_8/output/LayerNorm/batchnorm/sub" [id=1213, type=Sub]; -"1214 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_1" [id=1214, type=Mul]; -"1215 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" [id=1215, type=Add]; -"1216 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_1" [id=1216, label="1216 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"1217 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_1" [id=1217, label="1217 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"1218 QuantizeLinear_bert/encoder/layer_9/attention/self/value/kernel^0_1" [id=1218, label="1218 QuantizeLinear_bert/encoder/layer_9/attention/self/value/kernel:0_1", type=QuantizeLinear]; -"1219 DequantizeLinear_bert/encoder/layer_9/attention/self/value/kernel^0_1" [id=1219, label="1219 DequantizeLinear_bert/encoder/layer_9/attention/self/value/kernel:0_1", type=DequantizeLinear]; -"1220 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_2" [id=1220, label="1220 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; -"1221 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_2" [id=1221, label="1221 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; -"1222 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_3" [id=1222, label="1222 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; -"1223 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_3" [id=1223, label="1223 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; -"1224 bert/encoder/layer_9/attention/self/value/MatMul" [id=1224, type=MatMul]; -"1225 bert/encoder/layer_9/attention/self/value/BiasAdd" [id=1225, type=Add]; -"1226 bert/encoder/layer_9/attention/self/Reshape_2" [id=1226, type=Reshape]; -"1227 bert/encoder/layer_9/attention/self/transpose_2" [id=1227, type=Transpose]; -"1228 QuantizeLinear_bert/encoder/layer_9/attention/self/query/kernel^0_1" [id=1228, label="1228 QuantizeLinear_bert/encoder/layer_9/attention/self/query/kernel:0_1", type=QuantizeLinear]; -"1229 DequantizeLinear_bert/encoder/layer_9/attention/self/query/kernel^0_1" [id=1229, label="1229 DequantizeLinear_bert/encoder/layer_9/attention/self/query/kernel:0_1", type=DequantizeLinear]; -"1230 bert/encoder/layer_9/attention/self/query/MatMul" [id=1230, type=MatMul]; -"1231 bert/encoder/layer_9/attention/self/query/BiasAdd" [id=1231, type=Add]; -"1232 QuantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd^0_1" [id=1232, label="1232 
QuantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; -"1233 DequantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd^0_1" [id=1233, label="1233 DequantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; -"1234 bert/encoder/layer_9/attention/self/Reshape" [id=1234, type=Reshape]; -"1235 bert/encoder/layer_9/attention/self/transpose" [id=1235, type=Transpose]; -"1236 QuantizeLinear_bert/encoder/layer_9/attention/self/key/kernel^0_1" [id=1236, label="1236 QuantizeLinear_bert/encoder/layer_9/attention/self/key/kernel:0_1", type=QuantizeLinear]; -"1237 DequantizeLinear_bert/encoder/layer_9/attention/self/key/kernel^0_1" [id=1237, label="1237 DequantizeLinear_bert/encoder/layer_9/attention/self/key/kernel:0_1", type=DequantizeLinear]; -"1238 bert/encoder/layer_9/attention/self/key/MatMul" [id=1238, type=MatMul]; -"1239 bert/encoder/layer_9/attention/self/key/BiasAdd" [id=1239, type=Add]; -"1240 QuantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd^0_1" [id=1240, label="1240 QuantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; -"1241 DequantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd^0_1" [id=1241, label="1241 DequantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; -"1242 bert/encoder/layer_9/attention/self/Reshape_1" [id=1242, type=Reshape]; -"1243 bert/encoder/layer_9/attention/self/transpose_1" [id=1243, type=Transpose]; -"1244 bert/encoder/layer_9/attention/self/MatMul__432" [id=1244, type=Transpose]; -"1245 bert/encoder/layer_9/attention/self/MatMul" [id=1245, type=MatMul]; -"1246 bert/encoder/layer_9/attention/self/Mul" [id=1246, type=Mul]; -"1247 bert/encoder/layer_9/attention/self/add" [id=1247, type=Add]; -"1248 bert/encoder/layer_9/attention/self/Softmax" [id=1248, type=Softmax]; -"1249 bert/encoder/layer_9/attention/self/MatMul_1" [id=1249, type=MatMul]; -"1250 QuantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1^0_1" [id=1250, label="1250 QuantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1:0_1", type=QuantizeLinear]; -"1251 DequantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1^0_1" [id=1251, label="1251 DequantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1:0_1", type=DequantizeLinear]; -"1252 bert/encoder/layer_9/attention/self/transpose_3" [id=1252, type=Transpose]; -"1253 bert/encoder/layer_9/attention/self/Reshape_3" [id=1253, type=Reshape]; -"1254 QuantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel^0_1" [id=1254, label="1254 QuantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel:0_1", type=QuantizeLinear]; -"1255 DequantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel^0_1" [id=1255, label="1255 DequantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel:0_1", type=DequantizeLinear]; -"1256 bert/encoder/layer_9/attention/output/dense/MatMul" [id=1256, type=MatMul]; -"1257 bert/encoder/layer_9/attention/output/dense/BiasAdd" [id=1257, type=Add]; -"1258 bert/encoder/layer_9/attention/output/add" [id=1258, type=Add]; -"1259 bert/encoder/layer_9/attention/output/LayerNorm/moments/mean" [id=1259, type=ReduceMean]; -"1260 bert/encoder/layer_9/attention/output/LayerNorm/moments/StopGradient" [id=1260, type=Identity]; -"1261 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference" [id=1261, type=Sub]; -"1262 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference__435" [id=1262, type=Mul]; 
-"1263 bert/encoder/layer_9/attention/output/LayerNorm/moments/variance" [id=1263, type=ReduceMean]; -"1264 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add" [id=1264, type=Add]; -"1265 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/Rsqrt" [id=1265, type=Sqrt]; -"1266 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/Rsqrt__437" [id=1266, type=Reciprocal]; -"1267 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul" [id=1267, type=Mul]; -"1268 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_2" [id=1268, type=Mul]; -"1269 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/sub" [id=1269, type=Sub]; -"1270 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_1" [id=1270, type=Mul]; -"1271 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1" [id=1271, type=Add]; -"1272 QuantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1272, label="1272 QuantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"1273 DequantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1273, label="1273 DequantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"1274 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel^0_1" [id=1274, label="1274 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel:0_1", type=QuantizeLinear]; -"1275 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel^0_1" [id=1275, label="1275 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel:0_1", type=DequantizeLinear]; -"1276 bert/encoder/layer_9/intermediate/dense/MatMul" [id=1276, type=MatMul]; -"1277 bert/encoder/layer_9/intermediate/dense/BiasAdd" [id=1277, type=Add]; -"1278 bert/encoder/layer_9/intermediate/dense/Pow" [id=1278, type=Pow]; -"1279 bert/encoder/layer_9/intermediate/dense/mul" [id=1279, type=Mul]; -"1280 bert/encoder/layer_9/intermediate/dense/add" [id=1280, type=Add]; -"1281 bert/encoder/layer_9/intermediate/dense/mul_1" [id=1281, type=Mul]; -"1282 bert/encoder/layer_9/intermediate/dense/Tanh" [id=1282, type=Tanh]; -"1283 bert/encoder/layer_9/intermediate/dense/add_1" [id=1283, type=Add]; -"1284 bert/encoder/layer_9/intermediate/dense/mul_2" [id=1284, type=Mul]; -"1285 bert/encoder/layer_9/intermediate/dense/mul_3" [id=1285, type=Mul]; -"1286 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3^0_1" [id=1286, label="1286 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; -"1287 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3^0_1" [id=1287, label="1287 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; -"1288 QuantizeLinear_bert/encoder/layer_9/output/dense/kernel^0_1" [id=1288, label="1288 QuantizeLinear_bert/encoder/layer_9/output/dense/kernel:0_1", type=QuantizeLinear]; -"1289 DequantizeLinear_bert/encoder/layer_9/output/dense/kernel^0_1" [id=1289, label="1289 DequantizeLinear_bert/encoder/layer_9/output/dense/kernel:0_1", type=DequantizeLinear]; -"1290 bert/encoder/layer_9/output/dense/MatMul" [id=1290, type=MatMul]; -"1291 bert/encoder/layer_9/output/dense/BiasAdd" [id=1291, type=Add]; -"1292 bert/encoder/layer_9/output/add" [id=1292, type=Add]; -"1293 bert/encoder/layer_9/output/LayerNorm/moments/mean" [id=1293, type=ReduceMean]; -"1294 bert/encoder/layer_9/output/LayerNorm/moments/StopGradient" [id=1294, type=Identity]; -"1295 
bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference" [id=1295, type=Sub]; -"1296 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference__439" [id=1296, type=Mul]; -"1297 bert/encoder/layer_9/output/LayerNorm/moments/variance" [id=1297, type=ReduceMean]; -"1298 bert/encoder/layer_9/output/LayerNorm/batchnorm/add" [id=1298, type=Add]; -"1299 bert/encoder/layer_9/output/LayerNorm/batchnorm/Rsqrt" [id=1299, type=Sqrt]; -"1300 bert/encoder/layer_9/output/LayerNorm/batchnorm/Rsqrt__441" [id=1300, type=Reciprocal]; -"1301 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul" [id=1301, type=Mul]; -"1302 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_2" [id=1302, type=Mul]; -"1303 bert/encoder/layer_9/output/LayerNorm/batchnorm/sub" [id=1303, type=Sub]; -"1304 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_1" [id=1304, type=Mul]; -"1305 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" [id=1305, type=Add]; -"1306 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_1" [id=1306, label="1306 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"1307 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_1" [id=1307, label="1307 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"1308 QuantizeLinear_bert/encoder/layer_10/attention/self/value/kernel^0_1" [id=1308, label="1308 QuantizeLinear_bert/encoder/layer_10/attention/self/value/kernel:0_1", type=QuantizeLinear]; -"1309 DequantizeLinear_bert/encoder/layer_10/attention/self/value/kernel^0_1" [id=1309, label="1309 DequantizeLinear_bert/encoder/layer_10/attention/self/value/kernel:0_1", type=DequantizeLinear]; -"1310 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_2" [id=1310, label="1310 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; -"1311 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_2" [id=1311, label="1311 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; -"1312 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_3" [id=1312, label="1312 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; -"1313 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_3" [id=1313, label="1313 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; -"1314 bert/encoder/layer_10/attention/self/value/MatMul" [id=1314, type=MatMul]; -"1315 bert/encoder/layer_10/attention/self/value/BiasAdd" [id=1315, type=Add]; -"1316 bert/encoder/layer_10/attention/self/Reshape_2" [id=1316, type=Reshape]; -"1317 bert/encoder/layer_10/attention/self/transpose_2" [id=1317, type=Transpose]; -"1318 QuantizeLinear_bert/encoder/layer_10/attention/self/query/kernel^0_1" [id=1318, label="1318 QuantizeLinear_bert/encoder/layer_10/attention/self/query/kernel:0_1", type=QuantizeLinear]; -"1319 DequantizeLinear_bert/encoder/layer_10/attention/self/query/kernel^0_1" [id=1319, label="1319 DequantizeLinear_bert/encoder/layer_10/attention/self/query/kernel:0_1", type=DequantizeLinear]; -"1320 bert/encoder/layer_10/attention/self/query/MatMul" [id=1320, type=MatMul]; -"1321 bert/encoder/layer_10/attention/self/query/BiasAdd" [id=1321, type=Add]; -"1322 QuantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd^0_1" [id=1322, label="1322 
QuantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; -"1323 DequantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd^0_1" [id=1323, label="1323 DequantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; -"1324 bert/encoder/layer_10/attention/self/Reshape" [id=1324, type=Reshape]; -"1325 bert/encoder/layer_10/attention/self/transpose" [id=1325, type=Transpose]; -"1326 QuantizeLinear_bert/encoder/layer_10/attention/self/key/kernel^0_1" [id=1326, label="1326 QuantizeLinear_bert/encoder/layer_10/attention/self/key/kernel:0_1", type=QuantizeLinear]; -"1327 DequantizeLinear_bert/encoder/layer_10/attention/self/key/kernel^0_1" [id=1327, label="1327 DequantizeLinear_bert/encoder/layer_10/attention/self/key/kernel:0_1", type=DequantizeLinear]; -"1328 bert/encoder/layer_10/attention/self/key/MatMul" [id=1328, type=MatMul]; -"1329 bert/encoder/layer_10/attention/self/key/BiasAdd" [id=1329, type=Add]; -"1330 QuantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd^0_1" [id=1330, label="1330 QuantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; -"1331 DequantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd^0_1" [id=1331, label="1331 DequantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; -"1332 bert/encoder/layer_10/attention/self/Reshape_1" [id=1332, type=Reshape]; -"1333 bert/encoder/layer_10/attention/self/transpose_1" [id=1333, type=Transpose]; -"1334 bert/encoder/layer_10/attention/self/MatMul__446" [id=1334, type=Transpose]; -"1335 bert/encoder/layer_10/attention/self/MatMul" [id=1335, type=MatMul]; -"1336 bert/encoder/layer_10/attention/self/Mul" [id=1336, type=Mul]; -"1337 bert/encoder/layer_10/attention/self/add" [id=1337, type=Add]; -"1338 bert/encoder/layer_10/attention/self/Softmax" [id=1338, type=Softmax]; -"1339 bert/encoder/layer_10/attention/self/MatMul_1" [id=1339, type=MatMul]; -"1340 QuantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1^0_1" [id=1340, label="1340 QuantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1:0_1", type=QuantizeLinear]; -"1341 DequantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1^0_1" [id=1341, label="1341 DequantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1:0_1", type=DequantizeLinear]; -"1342 bert/encoder/layer_10/attention/self/transpose_3" [id=1342, type=Transpose]; -"1343 bert/encoder/layer_10/attention/self/Reshape_3" [id=1343, type=Reshape]; -"1344 QuantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel^0_1" [id=1344, label="1344 QuantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel:0_1", type=QuantizeLinear]; -"1345 DequantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel^0_1" [id=1345, label="1345 DequantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel:0_1", type=DequantizeLinear]; -"1346 bert/encoder/layer_10/attention/output/dense/MatMul" [id=1346, type=MatMul]; -"1347 bert/encoder/layer_10/attention/output/dense/BiasAdd" [id=1347, type=Add]; -"1348 bert/encoder/layer_10/attention/output/add" [id=1348, type=Add]; -"1349 bert/encoder/layer_10/attention/output/LayerNorm/moments/mean" [id=1349, type=ReduceMean]; -"1350 bert/encoder/layer_10/attention/output/LayerNorm/moments/StopGradient" [id=1350, type=Identity]; -"1351 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference" [id=1351, type=Sub]; -"1352 
bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference__449" [id=1352, type=Mul]; -"1353 bert/encoder/layer_10/attention/output/LayerNorm/moments/variance" [id=1353, type=ReduceMean]; -"1354 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add" [id=1354, type=Add]; -"1355 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/Rsqrt" [id=1355, type=Sqrt]; -"1356 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/Rsqrt__451" [id=1356, type=Reciprocal]; -"1357 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul" [id=1357, type=Mul]; -"1358 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_2" [id=1358, type=Mul]; -"1359 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/sub" [id=1359, type=Sub]; -"1360 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_1" [id=1360, type=Mul]; -"1361 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1" [id=1361, type=Add]; -"1362 QuantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1362, label="1362 QuantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"1363 DequantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1363, label="1363 DequantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"1364 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel^0_1" [id=1364, label="1364 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel:0_1", type=QuantizeLinear]; -"1365 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel^0_1" [id=1365, label="1365 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel:0_1", type=DequantizeLinear]; -"1366 bert/encoder/layer_10/intermediate/dense/MatMul" [id=1366, type=MatMul]; -"1367 bert/encoder/layer_10/intermediate/dense/BiasAdd" [id=1367, type=Add]; -"1368 bert/encoder/layer_10/intermediate/dense/Pow" [id=1368, type=Pow]; -"1369 bert/encoder/layer_10/intermediate/dense/mul" [id=1369, type=Mul]; -"1370 bert/encoder/layer_10/intermediate/dense/add" [id=1370, type=Add]; -"1371 bert/encoder/layer_10/intermediate/dense/mul_1" [id=1371, type=Mul]; -"1372 bert/encoder/layer_10/intermediate/dense/Tanh" [id=1372, type=Tanh]; -"1373 bert/encoder/layer_10/intermediate/dense/add_1" [id=1373, type=Add]; -"1374 bert/encoder/layer_10/intermediate/dense/mul_2" [id=1374, type=Mul]; -"1375 bert/encoder/layer_10/intermediate/dense/mul_3" [id=1375, type=Mul]; -"1376 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3^0_1" [id=1376, label="1376 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; -"1377 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3^0_1" [id=1377, label="1377 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; -"1378 QuantizeLinear_bert/encoder/layer_10/output/dense/kernel^0_1" [id=1378, label="1378 QuantizeLinear_bert/encoder/layer_10/output/dense/kernel:0_1", type=QuantizeLinear]; -"1379 DequantizeLinear_bert/encoder/layer_10/output/dense/kernel^0_1" [id=1379, label="1379 DequantizeLinear_bert/encoder/layer_10/output/dense/kernel:0_1", type=DequantizeLinear]; -"1380 bert/encoder/layer_10/output/dense/MatMul" [id=1380, type=MatMul]; -"1381 bert/encoder/layer_10/output/dense/BiasAdd" [id=1381, type=Add]; -"1382 bert/encoder/layer_10/output/add" [id=1382, type=Add]; -"1383 
bert/encoder/layer_10/output/LayerNorm/moments/mean" [id=1383, type=ReduceMean]; -"1384 bert/encoder/layer_10/output/LayerNorm/moments/StopGradient" [id=1384, type=Identity]; -"1385 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference" [id=1385, type=Sub]; -"1386 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference__453" [id=1386, type=Mul]; -"1387 bert/encoder/layer_10/output/LayerNorm/moments/variance" [id=1387, type=ReduceMean]; -"1388 bert/encoder/layer_10/output/LayerNorm/batchnorm/add" [id=1388, type=Add]; -"1389 bert/encoder/layer_10/output/LayerNorm/batchnorm/Rsqrt" [id=1389, type=Sqrt]; -"1390 bert/encoder/layer_10/output/LayerNorm/batchnorm/Rsqrt__455" [id=1390, type=Reciprocal]; -"1391 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul" [id=1391, type=Mul]; -"1392 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_2" [id=1392, type=Mul]; -"1393 bert/encoder/layer_10/output/LayerNorm/batchnorm/sub" [id=1393, type=Sub]; -"1394 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_1" [id=1394, type=Mul]; -"1395 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" [id=1395, type=Add]; -"1396 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_1" [id=1396, label="1396 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"1397 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_1" [id=1397, label="1397 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"1398 QuantizeLinear_bert/encoder/layer_11/attention/self/value/kernel^0_1" [id=1398, label="1398 QuantizeLinear_bert/encoder/layer_11/attention/self/value/kernel:0_1", type=QuantizeLinear]; -"1399 DequantizeLinear_bert/encoder/layer_11/attention/self/value/kernel^0_1" [id=1399, label="1399 DequantizeLinear_bert/encoder/layer_11/attention/self/value/kernel:0_1", type=DequantizeLinear]; -"1400 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_2" [id=1400, label="1400 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; -"1401 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_2" [id=1401, label="1401 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; -"1402 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_3" [id=1402, label="1402 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; -"1403 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_3" [id=1403, label="1403 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; -"1404 bert/encoder/layer_11/attention/self/value/MatMul" [id=1404, type=MatMul]; -"1405 bert/encoder/layer_11/attention/self/value/BiasAdd" [id=1405, type=Add]; -"1406 bert/encoder/layer_11/attention/self/Reshape_2" [id=1406, type=Reshape]; -"1407 bert/encoder/layer_11/attention/self/transpose_2" [id=1407, type=Transpose]; -"1408 QuantizeLinear_bert/encoder/layer_11/attention/self/query/kernel^0_1" [id=1408, label="1408 QuantizeLinear_bert/encoder/layer_11/attention/self/query/kernel:0_1", type=QuantizeLinear]; -"1409 DequantizeLinear_bert/encoder/layer_11/attention/self/query/kernel^0_1" [id=1409, label="1409 DequantizeLinear_bert/encoder/layer_11/attention/self/query/kernel:0_1", type=DequantizeLinear]; -"1410 bert/encoder/layer_11/attention/self/query/MatMul" [id=1410, 
type=MatMul]; -"1411 bert/encoder/layer_11/attention/self/query/BiasAdd" [id=1411, type=Add]; -"1412 QuantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd^0_1" [id=1412, label="1412 QuantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; -"1413 DequantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd^0_1" [id=1413, label="1413 DequantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; -"1414 bert/encoder/layer_11/attention/self/Reshape" [id=1414, type=Reshape]; -"1415 bert/encoder/layer_11/attention/self/transpose" [id=1415, type=Transpose]; -"1416 QuantizeLinear_bert/encoder/layer_11/attention/self/key/kernel^0_1" [id=1416, label="1416 QuantizeLinear_bert/encoder/layer_11/attention/self/key/kernel:0_1", type=QuantizeLinear]; -"1417 DequantizeLinear_bert/encoder/layer_11/attention/self/key/kernel^0_1" [id=1417, label="1417 DequantizeLinear_bert/encoder/layer_11/attention/self/key/kernel:0_1", type=DequantizeLinear]; -"1418 bert/encoder/layer_11/attention/self/key/MatMul" [id=1418, type=MatMul]; -"1419 bert/encoder/layer_11/attention/self/key/BiasAdd" [id=1419, type=Add]; -"1420 QuantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd^0_1" [id=1420, label="1420 QuantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; -"1421 DequantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd^0_1" [id=1421, label="1421 DequantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; -"1422 bert/encoder/layer_11/attention/self/Reshape_1" [id=1422, type=Reshape]; -"1423 bert/encoder/layer_11/attention/self/transpose_1" [id=1423, type=Transpose]; -"1424 bert/encoder/layer_11/attention/self/MatMul__460" [id=1424, type=Transpose]; -"1425 bert/encoder/layer_11/attention/self/MatMul" [id=1425, type=MatMul]; -"1426 bert/encoder/layer_11/attention/self/Mul" [id=1426, type=Mul]; -"1427 bert/encoder/layer_11/attention/self/add" [id=1427, type=Add]; -"1428 bert/encoder/layer_11/attention/self/Softmax" [id=1428, type=Softmax]; -"1429 bert/encoder/layer_11/attention/self/MatMul_1" [id=1429, type=MatMul]; -"1430 QuantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1^0_1" [id=1430, label="1430 QuantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1:0_1", type=QuantizeLinear]; -"1431 DequantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1^0_1" [id=1431, label="1431 DequantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1:0_1", type=DequantizeLinear]; -"1432 bert/encoder/layer_11/attention/self/transpose_3" [id=1432, type=Transpose]; -"1433 bert/encoder/layer_11/attention/self/Reshape_3" [id=1433, type=Reshape]; -"1434 QuantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel^0_1" [id=1434, label="1434 QuantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel:0_1", type=QuantizeLinear]; -"1435 DequantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel^0_1" [id=1435, label="1435 DequantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel:0_1", type=DequantizeLinear]; -"1436 bert/encoder/layer_11/attention/output/dense/MatMul" [id=1436, type=MatMul]; -"1437 bert/encoder/layer_11/attention/output/dense/BiasAdd" [id=1437, type=Add]; -"1438 bert/encoder/layer_11/attention/output/add" [id=1438, type=Add]; -"1439 bert/encoder/layer_11/attention/output/LayerNorm/moments/mean" [id=1439, type=ReduceMean]; -"1440 bert/encoder/layer_11/attention/output/LayerNorm/moments/StopGradient" [id=1440, 
type=Identity]; -"1441 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference" [id=1441, type=Sub]; -"1442 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference__463" [id=1442, type=Mul]; -"1443 bert/encoder/layer_11/attention/output/LayerNorm/moments/variance" [id=1443, type=ReduceMean]; -"1444 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add" [id=1444, type=Add]; -"1445 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/Rsqrt" [id=1445, type=Sqrt]; -"1446 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/Rsqrt__465" [id=1446, type=Reciprocal]; -"1447 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul" [id=1447, type=Mul]; -"1448 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_2" [id=1448, type=Mul]; -"1449 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/sub" [id=1449, type=Sub]; -"1450 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_1" [id=1450, type=Mul]; -"1451 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1" [id=1451, type=Add]; -"1452 QuantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1452, label="1452 QuantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"1453 DequantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1453, label="1453 DequantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"1454 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel^0_1" [id=1454, label="1454 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel:0_1", type=QuantizeLinear]; -"1455 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel^0_1" [id=1455, label="1455 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel:0_1", type=DequantizeLinear]; -"1456 bert/encoder/layer_11/intermediate/dense/MatMul" [id=1456, type=MatMul]; -"1457 bert/encoder/layer_11/intermediate/dense/BiasAdd" [id=1457, type=Add]; -"1458 bert/encoder/layer_11/intermediate/dense/Pow" [id=1458, type=Pow]; -"1459 bert/encoder/layer_11/intermediate/dense/mul" [id=1459, type=Mul]; -"1460 bert/encoder/layer_11/intermediate/dense/add" [id=1460, type=Add]; -"1461 bert/encoder/layer_11/intermediate/dense/mul_1" [id=1461, type=Mul]; -"1462 bert/encoder/layer_11/intermediate/dense/Tanh" [id=1462, type=Tanh]; -"1463 bert/encoder/layer_11/intermediate/dense/add_1" [id=1463, type=Add]; -"1464 bert/encoder/layer_11/intermediate/dense/mul_2" [id=1464, type=Mul]; -"1465 bert/encoder/layer_11/intermediate/dense/mul_3" [id=1465, type=Mul]; -"1466 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3^0_1" [id=1466, label="1466 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; -"1467 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3^0_1" [id=1467, label="1467 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; -"1468 QuantizeLinear_bert/encoder/layer_11/output/dense/kernel^0_1" [id=1468, label="1468 QuantizeLinear_bert/encoder/layer_11/output/dense/kernel:0_1", type=QuantizeLinear]; -"1469 DequantizeLinear_bert/encoder/layer_11/output/dense/kernel^0_1" [id=1469, label="1469 DequantizeLinear_bert/encoder/layer_11/output/dense/kernel:0_1", type=DequantizeLinear]; -"1470 bert/encoder/layer_11/output/dense/MatMul" [id=1470, type=MatMul]; -"1471 bert/encoder/layer_11/output/dense/BiasAdd" [id=1471, 
type=Add]; -"1472 bert/encoder/layer_11/output/add" [id=1472, type=Add]; -"1473 bert/encoder/layer_11/output/LayerNorm/moments/mean" [id=1473, type=ReduceMean]; -"1474 bert/encoder/layer_11/output/LayerNorm/moments/StopGradient" [id=1474, type=Identity]; -"1475 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference" [id=1475, type=Sub]; -"1476 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference__467" [id=1476, type=Mul]; -"1477 bert/encoder/layer_11/output/LayerNorm/moments/variance" [id=1477, type=ReduceMean]; -"1478 bert/encoder/layer_11/output/LayerNorm/batchnorm/add" [id=1478, type=Add]; -"1479 bert/encoder/layer_11/output/LayerNorm/batchnorm/Rsqrt" [id=1479, type=Sqrt]; -"1480 bert/encoder/layer_11/output/LayerNorm/batchnorm/Rsqrt__469" [id=1480, type=Reciprocal]; -"1481 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul" [id=1481, type=Mul]; -"1482 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_2" [id=1482, type=Mul]; -"1483 bert/encoder/layer_11/output/LayerNorm/batchnorm/sub" [id=1483, type=Sub]; -"1484 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_1" [id=1484, type=Mul]; -"1485 bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1" [id=1485, type=Add]; -"1486 QuantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1^0_1" [id=1486, label="1486 QuantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; -"1487 DequantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1^0_1" [id=1487, label="1487 DequantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; -"1488 bert/encoder/Reshape_13" [id=1488, type=Reshape]; -"1489 Shape_1" [id=1489, type=Shape]; -"1490 Shape_1__472" [id=1490, type=Cast]; -"1491 strided_slice_1" [id=1491, type=Slice]; -"1492 strided_slice_1__476" [id=1492, type=Squeeze]; -"1493 strided_slice_1__477" [id=1493, type=Cast]; -"1494 mul" [id=1494, type=Mul]; -"1495 Reshape/shape_Unsqueeze__482" [id=1495, type=Unsqueeze]; -"1496 Reshape/shape_Concat__484" [id=1496, type=Concat]; -"1497 Reshape__485" [id=1497, type=Cast]; -"1498 Reshape_1/shape_Unsqueeze__478" [id=1498, type=Unsqueeze]; -"1499 Reshape_1/shape_Concat__481" [id=1499, type=Concat]; -"1500 Reshape_1__487" [id=1500, type=Cast]; -"1501 Reshape" [id=1501, type=Reshape]; -"1502 QuantizeLinear_MatMul__486^0_1" [id=1502, label="1502 QuantizeLinear_MatMul__486:0_1", type=QuantizeLinear]; -"1503 DequantizeLinear_MatMul__486^0_1" [id=1503, label="1503 DequantizeLinear_MatMul__486:0_1", type=DequantizeLinear]; -"1504 MatMul" [id=1504, type=MatMul]; -"1505 BiasAdd" [id=1505, type=Add]; -"1506 Reshape_1" [id=1506, type=Reshape]; -"1507 transpose" [id=1507, type=Transpose]; -"1508 unstack" [id=1508, type=Split]; -"1509 unstack__490" [id=1509, type=Squeeze]; -"1510 unstack_graph_outputs_Identity__4" [id=1510, type=Identity]; -"1511 unstack__488" [id=1511, type=Squeeze]; -"1512 unstack_graph_outputs_Identity__7" [id=1512, type=Identity]; -"1513 nncf_model_input_0" [id=1513, type=nncf_model_input]; -"1514 nncf_model_input_1" [id=1514, type=nncf_model_input]; -"1515 nncf_model_input_2" [id=1515, type=nncf_model_input]; -"1516 nncf_model_input_3" [id=1516, type=nncf_model_input]; -"1517 nncf_model_output_0" [id=1517, type=nncf_model_output]; -"1518 nncf_model_output_1" [id=1518, type=nncf_model_output]; -"1519 nncf_model_output_2" [id=1519, type=nncf_model_output]; -"0 unique_ids_graph_outputs_Identity__10" -> "1519 nncf_model_output_2" [label="[-1]", style=dashed]; -"1 
bert/encoder/ones/packed_Unsqueeze__20" -> "129 bert/encoder/ones/packed_Concat__21" [label="[1]", style=dashed]; -"2 bert/encoder/ones/packed_Unsqueeze__19" -> "129 bert/encoder/ones/packed_Concat__21" [label="[1]", style=dashed]; -"3 bert/encoder/layer_9/attention/self/Reshape_3/shape_Unsqueeze__83" -> "248 bert/encoder/layer_9/attention/self/Reshape_3/shape_Concat__84" [label="[1]", style=dashed]; -"4 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__88" -> "251 bert/encoder/layer_9/attention/self/Reshape_2/shape_Concat__89" [label="[1]", style=dashed]; -"5 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__87" -> "251 bert/encoder/layer_9/attention/self/Reshape_2/shape_Concat__89" [label="[1]", style=dashed]; -"6 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__86" -> "251 bert/encoder/layer_9/attention/self/Reshape_2/shape_Concat__89" [label="[1]", style=dashed]; -"7 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__93" -> "254 bert/encoder/layer_9/attention/self/Reshape_1/shape_Concat__94" [label="[1]", style=dashed]; -"8 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__92" -> "254 bert/encoder/layer_9/attention/self/Reshape_1/shape_Concat__94" [label="[1]", style=dashed]; -"9 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__91" -> "254 bert/encoder/layer_9/attention/self/Reshape_1/shape_Concat__94" [label="[1]", style=dashed]; -"10 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__98" -> "257 bert/encoder/layer_9/attention/self/Reshape/shape_Concat__99" [label="[1]", style=dashed]; -"11 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__97" -> "257 bert/encoder/layer_9/attention/self/Reshape/shape_Concat__99" [label="[1]", style=dashed]; -"12 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__96" -> "257 bert/encoder/layer_9/attention/self/Reshape/shape_Concat__99" [label="[1]", style=dashed]; -"13 bert/encoder/layer_8/attention/self/Reshape_3/shape_Unsqueeze__101" -> "261 bert/encoder/layer_8/attention/self/Reshape_3/shape_Concat__102" [label="[1]", style=dashed]; -"14 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__106" -> "264 bert/encoder/layer_8/attention/self/Reshape_2/shape_Concat__107" [label="[1]", style=dashed]; -"15 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__105" -> "264 bert/encoder/layer_8/attention/self/Reshape_2/shape_Concat__107" [label="[1]", style=dashed]; -"16 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__104" -> "264 bert/encoder/layer_8/attention/self/Reshape_2/shape_Concat__107" [label="[1]", style=dashed]; -"17 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__111" -> "267 bert/encoder/layer_8/attention/self/Reshape_1/shape_Concat__112" [label="[1]", style=dashed]; -"18 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__110" -> "267 bert/encoder/layer_8/attention/self/Reshape_1/shape_Concat__112" [label="[1]", style=dashed]; -"19 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__109" -> "267 bert/encoder/layer_8/attention/self/Reshape_1/shape_Concat__112" [label="[1]", style=dashed]; -"20 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__116" -> "270 bert/encoder/layer_8/attention/self/Reshape/shape_Concat__117" [label="[1]", style=dashed]; -"21 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__115" -> "270 bert/encoder/layer_8/attention/self/Reshape/shape_Concat__117" [label="[1]", style=dashed]; -"22 
bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__114" -> "270 bert/encoder/layer_8/attention/self/Reshape/shape_Concat__117" [label="[1]", style=dashed]; -"23 bert/encoder/layer_7/attention/self/Reshape_3/shape_Unsqueeze__119" -> "274 bert/encoder/layer_7/attention/self/Reshape_3/shape_Concat__120" [label="[1]", style=dashed]; -"24 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__124" -> "277 bert/encoder/layer_7/attention/self/Reshape_2/shape_Concat__125" [label="[1]", style=dashed]; -"25 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__123" -> "277 bert/encoder/layer_7/attention/self/Reshape_2/shape_Concat__125" [label="[1]", style=dashed]; -"26 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__122" -> "277 bert/encoder/layer_7/attention/self/Reshape_2/shape_Concat__125" [label="[1]", style=dashed]; -"27 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__129" -> "280 bert/encoder/layer_7/attention/self/Reshape_1/shape_Concat__130" [label="[1]", style=dashed]; -"28 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__128" -> "280 bert/encoder/layer_7/attention/self/Reshape_1/shape_Concat__130" [label="[1]", style=dashed]; -"29 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__127" -> "280 bert/encoder/layer_7/attention/self/Reshape_1/shape_Concat__130" [label="[1]", style=dashed]; -"30 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__134" -> "283 bert/encoder/layer_7/attention/self/Reshape/shape_Concat__135" [label="[1]", style=dashed]; -"31 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__133" -> "283 bert/encoder/layer_7/attention/self/Reshape/shape_Concat__135" [label="[1]", style=dashed]; -"32 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__132" -> "283 bert/encoder/layer_7/attention/self/Reshape/shape_Concat__135" [label="[1]", style=dashed]; -"33 bert/encoder/layer_6/attention/self/Reshape_3/shape_Unsqueeze__137" -> "287 bert/encoder/layer_6/attention/self/Reshape_3/shape_Concat__138" [label="[1]", style=dashed]; -"34 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__142" -> "290 bert/encoder/layer_6/attention/self/Reshape_2/shape_Concat__143" [label="[1]", style=dashed]; -"35 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__141" -> "290 bert/encoder/layer_6/attention/self/Reshape_2/shape_Concat__143" [label="[1]", style=dashed]; -"36 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__140" -> "290 bert/encoder/layer_6/attention/self/Reshape_2/shape_Concat__143" [label="[1]", style=dashed]; -"37 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__147" -> "293 bert/encoder/layer_6/attention/self/Reshape_1/shape_Concat__148" [label="[1]", style=dashed]; -"38 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__146" -> "293 bert/encoder/layer_6/attention/self/Reshape_1/shape_Concat__148" [label="[1]", style=dashed]; -"39 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__145" -> "293 bert/encoder/layer_6/attention/self/Reshape_1/shape_Concat__148" [label="[1]", style=dashed]; -"40 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__152" -> "296 bert/encoder/layer_6/attention/self/Reshape/shape_Concat__153" [label="[1]", style=dashed]; -"41 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__151" -> "296 bert/encoder/layer_6/attention/self/Reshape/shape_Concat__153" [label="[1]", style=dashed]; -"42 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__150" -> "296 
bert/encoder/layer_6/attention/self/Reshape/shape_Concat__153" [label="[1]", style=dashed]; -"43 bert/encoder/layer_5/attention/self/Reshape_3/shape_Unsqueeze__155" -> "300 bert/encoder/layer_5/attention/self/Reshape_3/shape_Concat__156" [label="[1]", style=dashed]; -"44 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__160" -> "303 bert/encoder/layer_5/attention/self/Reshape_2/shape_Concat__161" [label="[1]", style=dashed]; -"45 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__159" -> "303 bert/encoder/layer_5/attention/self/Reshape_2/shape_Concat__161" [label="[1]", style=dashed]; -"46 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__158" -> "303 bert/encoder/layer_5/attention/self/Reshape_2/shape_Concat__161" [label="[1]", style=dashed]; -"47 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__165" -> "306 bert/encoder/layer_5/attention/self/Reshape_1/shape_Concat__166" [label="[1]", style=dashed]; -"48 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__164" -> "306 bert/encoder/layer_5/attention/self/Reshape_1/shape_Concat__166" [label="[1]", style=dashed]; -"49 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__163" -> "306 bert/encoder/layer_5/attention/self/Reshape_1/shape_Concat__166" [label="[1]", style=dashed]; -"50 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__170" -> "309 bert/encoder/layer_5/attention/self/Reshape/shape_Concat__171" [label="[1]", style=dashed]; -"51 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__169" -> "309 bert/encoder/layer_5/attention/self/Reshape/shape_Concat__171" [label="[1]", style=dashed]; -"52 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__168" -> "309 bert/encoder/layer_5/attention/self/Reshape/shape_Concat__171" [label="[1]", style=dashed]; -"53 bert/encoder/layer_4/attention/self/Reshape_3/shape_Unsqueeze__173" -> "313 bert/encoder/layer_4/attention/self/Reshape_3/shape_Concat__174" [label="[1]", style=dashed]; -"54 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__178" -> "316 bert/encoder/layer_4/attention/self/Reshape_2/shape_Concat__179" [label="[1]", style=dashed]; -"55 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__177" -> "316 bert/encoder/layer_4/attention/self/Reshape_2/shape_Concat__179" [label="[1]", style=dashed]; -"56 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__176" -> "316 bert/encoder/layer_4/attention/self/Reshape_2/shape_Concat__179" [label="[1]", style=dashed]; -"57 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__183" -> "319 bert/encoder/layer_4/attention/self/Reshape_1/shape_Concat__184" [label="[1]", style=dashed]; -"58 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__182" -> "319 bert/encoder/layer_4/attention/self/Reshape_1/shape_Concat__184" [label="[1]", style=dashed]; -"59 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__181" -> "319 bert/encoder/layer_4/attention/self/Reshape_1/shape_Concat__184" [label="[1]", style=dashed]; -"60 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__188" -> "322 bert/encoder/layer_4/attention/self/Reshape/shape_Concat__189" [label="[1]", style=dashed]; -"61 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__187" -> "322 bert/encoder/layer_4/attention/self/Reshape/shape_Concat__189" [label="[1]", style=dashed]; -"62 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__186" -> "322 bert/encoder/layer_4/attention/self/Reshape/shape_Concat__189" [label="[1]", 
style=dashed]; -"63 bert/encoder/layer_3/attention/self/Reshape_3/shape_Unsqueeze__191" -> "326 bert/encoder/layer_3/attention/self/Reshape_3/shape_Concat__192" [label="[1]", style=dashed]; -"64 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__196" -> "329 bert/encoder/layer_3/attention/self/Reshape_2/shape_Concat__197" [label="[1]", style=dashed]; -"65 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__195" -> "329 bert/encoder/layer_3/attention/self/Reshape_2/shape_Concat__197" [label="[1]", style=dashed]; -"66 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__194" -> "329 bert/encoder/layer_3/attention/self/Reshape_2/shape_Concat__197" [label="[1]", style=dashed]; -"67 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__201" -> "332 bert/encoder/layer_3/attention/self/Reshape_1/shape_Concat__202" [label="[1]", style=dashed]; -"68 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__200" -> "332 bert/encoder/layer_3/attention/self/Reshape_1/shape_Concat__202" [label="[1]", style=dashed]; -"69 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__199" -> "332 bert/encoder/layer_3/attention/self/Reshape_1/shape_Concat__202" [label="[1]", style=dashed]; -"70 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__206" -> "335 bert/encoder/layer_3/attention/self/Reshape/shape_Concat__207" [label="[1]", style=dashed]; -"71 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__205" -> "335 bert/encoder/layer_3/attention/self/Reshape/shape_Concat__207" [label="[1]", style=dashed]; -"72 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__204" -> "335 bert/encoder/layer_3/attention/self/Reshape/shape_Concat__207" [label="[1]", style=dashed]; -"73 bert/encoder/layer_2/attention/self/Reshape_3/shape_Unsqueeze__209" -> "339 bert/encoder/layer_2/attention/self/Reshape_3/shape_Concat__210" [label="[1]", style=dashed]; -"74 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__214" -> "342 bert/encoder/layer_2/attention/self/Reshape_2/shape_Concat__215" [label="[1]", style=dashed]; -"75 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__213" -> "342 bert/encoder/layer_2/attention/self/Reshape_2/shape_Concat__215" [label="[1]", style=dashed]; -"76 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__212" -> "342 bert/encoder/layer_2/attention/self/Reshape_2/shape_Concat__215" [label="[1]", style=dashed]; -"77 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__219" -> "345 bert/encoder/layer_2/attention/self/Reshape_1/shape_Concat__220" [label="[1]", style=dashed]; -"78 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__218" -> "345 bert/encoder/layer_2/attention/self/Reshape_1/shape_Concat__220" [label="[1]", style=dashed]; -"79 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__217" -> "345 bert/encoder/layer_2/attention/self/Reshape_1/shape_Concat__220" [label="[1]", style=dashed]; -"80 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__224" -> "348 bert/encoder/layer_2/attention/self/Reshape/shape_Concat__225" [label="[1]", style=dashed]; -"81 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__223" -> "348 bert/encoder/layer_2/attention/self/Reshape/shape_Concat__225" [label="[1]", style=dashed]; -"82 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__222" -> "348 bert/encoder/layer_2/attention/self/Reshape/shape_Concat__225" [label="[1]", style=dashed]; -"83 
bert/encoder/layer_11/attention/self/Reshape_3/shape_Unsqueeze__227" -> "352 bert/encoder/layer_11/attention/self/Reshape_3/shape_Concat__228" [label="[1]", style=dashed]; -"84 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__232" -> "355 bert/encoder/layer_11/attention/self/Reshape_2/shape_Concat__233" [label="[1]", style=dashed]; -"85 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__231" -> "355 bert/encoder/layer_11/attention/self/Reshape_2/shape_Concat__233" [label="[1]", style=dashed]; -"86 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__230" -> "355 bert/encoder/layer_11/attention/self/Reshape_2/shape_Concat__233" [label="[1]", style=dashed]; -"87 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__237" -> "358 bert/encoder/layer_11/attention/self/Reshape_1/shape_Concat__238" [label="[1]", style=dashed]; -"88 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__236" -> "358 bert/encoder/layer_11/attention/self/Reshape_1/shape_Concat__238" [label="[1]", style=dashed]; -"89 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__235" -> "358 bert/encoder/layer_11/attention/self/Reshape_1/shape_Concat__238" [label="[1]", style=dashed]; -"90 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__242" -> "361 bert/encoder/layer_11/attention/self/Reshape/shape_Concat__243" [label="[1]", style=dashed]; -"91 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__241" -> "361 bert/encoder/layer_11/attention/self/Reshape/shape_Concat__243" [label="[1]", style=dashed]; -"92 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__240" -> "361 bert/encoder/layer_11/attention/self/Reshape/shape_Concat__243" [label="[1]", style=dashed]; -"93 bert/encoder/layer_10/attention/self/Reshape_3/shape_Unsqueeze__245" -> "365 bert/encoder/layer_10/attention/self/Reshape_3/shape_Concat__246" [label="[1]", style=dashed]; -"94 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__250" -> "368 bert/encoder/layer_10/attention/self/Reshape_2/shape_Concat__251" [label="[1]", style=dashed]; -"95 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__249" -> "368 bert/encoder/layer_10/attention/self/Reshape_2/shape_Concat__251" [label="[1]", style=dashed]; -"96 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__248" -> "368 bert/encoder/layer_10/attention/self/Reshape_2/shape_Concat__251" [label="[1]", style=dashed]; -"97 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__255" -> "371 bert/encoder/layer_10/attention/self/Reshape_1/shape_Concat__256" [label="[1]", style=dashed]; -"98 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__254" -> "371 bert/encoder/layer_10/attention/self/Reshape_1/shape_Concat__256" [label="[1]", style=dashed]; -"99 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__253" -> "371 bert/encoder/layer_10/attention/self/Reshape_1/shape_Concat__256" [label="[1]", style=dashed]; -"100 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__260" -> "374 bert/encoder/layer_10/attention/self/Reshape/shape_Concat__261" [label="[1]", style=dashed]; -"101 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__259" -> "374 bert/encoder/layer_10/attention/self/Reshape/shape_Concat__261" [label="[1]", style=dashed]; -"102 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__258" -> "374 bert/encoder/layer_10/attention/self/Reshape/shape_Concat__261" [label="[1]", style=dashed]; -"103 
bert/encoder/layer_1/attention/self/Reshape_3/shape_Unsqueeze__263" -> "378 bert/encoder/layer_1/attention/self/Reshape_3/shape_Concat__264" [label="[1]", style=dashed]; -"104 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__268" -> "381 bert/encoder/layer_1/attention/self/Reshape_2/shape_Concat__269" [label="[1]", style=dashed]; -"105 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__267" -> "381 bert/encoder/layer_1/attention/self/Reshape_2/shape_Concat__269" [label="[1]", style=dashed]; -"106 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__266" -> "381 bert/encoder/layer_1/attention/self/Reshape_2/shape_Concat__269" [label="[1]", style=dashed]; -"107 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__273" -> "384 bert/encoder/layer_1/attention/self/Reshape_1/shape_Concat__274" [label="[1]", style=dashed]; -"108 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__272" -> "384 bert/encoder/layer_1/attention/self/Reshape_1/shape_Concat__274" [label="[1]", style=dashed]; -"109 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__271" -> "384 bert/encoder/layer_1/attention/self/Reshape_1/shape_Concat__274" [label="[1]", style=dashed]; -"110 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__278" -> "387 bert/encoder/layer_1/attention/self/Reshape/shape_Concat__279" [label="[1]", style=dashed]; -"111 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__277" -> "387 bert/encoder/layer_1/attention/self/Reshape/shape_Concat__279" [label="[1]", style=dashed]; -"112 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__276" -> "387 bert/encoder/layer_1/attention/self/Reshape/shape_Concat__279" [label="[1]", style=dashed]; -"113 bert/encoder/layer_0/attention/self/Reshape_3/shape_Unsqueeze__281" -> "391 bert/encoder/layer_0/attention/self/Reshape_3/shape_Concat__282" [label="[1]", style=dashed]; -"114 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__286" -> "394 bert/encoder/layer_0/attention/self/Reshape_2/shape_Concat__287" [label="[1]", style=dashed]; -"115 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__285" -> "394 bert/encoder/layer_0/attention/self/Reshape_2/shape_Concat__287" [label="[1]", style=dashed]; -"116 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__284" -> "394 bert/encoder/layer_0/attention/self/Reshape_2/shape_Concat__287" [label="[1]", style=dashed]; -"117 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__291" -> "397 bert/encoder/layer_0/attention/self/Reshape_1/shape_Concat__292" [label="[1]", style=dashed]; -"118 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__290" -> "397 bert/encoder/layer_0/attention/self/Reshape_1/shape_Concat__292" [label="[1]", style=dashed]; -"119 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__289" -> "397 bert/encoder/layer_0/attention/self/Reshape_1/shape_Concat__292" [label="[1]", style=dashed]; -"120 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__296" -> "400 bert/encoder/layer_0/attention/self/Reshape/shape_Concat__297" [label="[1]", style=dashed]; -"121 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__295" -> "400 bert/encoder/layer_0/attention/self/Reshape/shape_Concat__297" [label="[1]", style=dashed]; -"122 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__294" -> "400 bert/encoder/layer_0/attention/self/Reshape/shape_Concat__297" [label="[1]", style=dashed]; -"123 bert/encoder/Shape" -> "124 bert/encoder/Shape__12" 
[label="[2]", style=dashed]; -"124 bert/encoder/Shape__12" -> "125 bert/encoder/strided_slice" [label="[2]", style=solid]; -"125 bert/encoder/strided_slice" -> "126 bert/encoder/strided_slice__16" [label="[1]", style=solid]; -"126 bert/encoder/strided_slice__16" -> "127 bert/encoder/strided_slice__17" [label="[]", style=solid]; -"127 bert/encoder/strided_slice__17" -> "128 bert/encoder/ones/packed_Unsqueeze__18" [label="[]", style=dashed]; -"127 bert/encoder/strided_slice__17" -> "135 bert/encoder/Reshape/shape_Unsqueeze__23" [label="[]", style=dashed]; -"128 bert/encoder/ones/packed_Unsqueeze__18" -> "129 bert/encoder/ones/packed_Concat__21" [label="[1]", style=dashed]; -"129 bert/encoder/ones/packed_Concat__21" -> "130 bert/encoder/ones__22" [label="[3]", style=dashed]; -"130 bert/encoder/ones__22" -> "131 bert/encoder/ones" [label="[3]", style=dashed]; -"131 bert/encoder/ones" -> "142 bert/encoder/mul" [label="[-1, -1, -1]", style=solid]; -"132 bert/encoder/Reshape_13/shape_Unsqueeze__300" -> "403 bert/encoder/Reshape_13/shape_Concat__301" [label="[1]", style=dashed]; -"133 bert/encoder/Reshape_13/shape_Unsqueeze__299" -> "403 bert/encoder/Reshape_13/shape_Concat__301" [label="[1]", style=dashed]; -"134 bert/encoder/Reshape_1__302" -> "405 bert/encoder/Reshape_1" [label="[2]", style=dashed]; -"135 bert/encoder/Reshape/shape_Unsqueeze__23" -> "138 bert/encoder/Reshape/shape_Concat__26" [label="[1]", style=dashed]; -"136 bert/encoder/Reshape/shape_Unsqueeze__25" -> "138 bert/encoder/Reshape/shape_Concat__26" [label="[1]", style=dashed]; -"137 bert/encoder/Reshape/shape_Unsqueeze__24" -> "138 bert/encoder/Reshape/shape_Concat__26" [label="[1]", style=dashed]; -"138 bert/encoder/Reshape/shape_Concat__26" -> "139 bert/encoder/Reshape__27" [label="[3]", style=dashed]; -"139 bert/encoder/Reshape__27" -> "140 bert/encoder/Reshape" [label="[3]", style=dashed]; -"140 bert/encoder/Reshape" -> "141 bert/encoder/Cast" [label="[]", style=dashed]; -"141 bert/encoder/Cast" -> "142 bert/encoder/mul" [label="[]", style=solid]; -"142 bert/encoder/mul" -> "143 bert/encoder/layer_9/attention/self/ExpandDims" [label="[]", style=solid]; -"142 bert/encoder/mul" -> "146 bert/encoder/layer_8/attention/self/ExpandDims" [label="[]", style=solid]; -"142 bert/encoder/mul" -> "149 bert/encoder/layer_7/attention/self/ExpandDims" [label="[]", style=solid]; -"142 bert/encoder/mul" -> "152 bert/encoder/layer_6/attention/self/ExpandDims" [label="[]", style=solid]; -"142 bert/encoder/mul" -> "155 bert/encoder/layer_5/attention/self/ExpandDims" [label="[]", style=solid]; -"142 bert/encoder/mul" -> "158 bert/encoder/layer_4/attention/self/ExpandDims" [label="[]", style=solid]; -"142 bert/encoder/mul" -> "161 bert/encoder/layer_3/attention/self/ExpandDims" [label="[]", style=solid]; -"142 bert/encoder/mul" -> "164 bert/encoder/layer_2/attention/self/ExpandDims" [label="[]", style=solid]; -"142 bert/encoder/mul" -> "167 bert/encoder/layer_11/attention/self/ExpandDims" [label="[]", style=solid]; -"142 bert/encoder/mul" -> "170 bert/encoder/layer_10/attention/self/ExpandDims" [label="[]", style=solid]; -"142 bert/encoder/mul" -> "173 bert/encoder/layer_1/attention/self/ExpandDims" [label="[]", style=solid]; -"142 bert/encoder/mul" -> "176 bert/encoder/layer_0/attention/self/ExpandDims" [label="[]", style=solid]; -"143 bert/encoder/layer_9/attention/self/ExpandDims" -> "144 bert/encoder/layer_9/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; -"144 bert/encoder/layer_9/attention/self/sub" -> "145 
bert/encoder/layer_9/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; -"145 bert/encoder/layer_9/attention/self/mul_1" -> "1247 bert/encoder/layer_9/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; -"146 bert/encoder/layer_8/attention/self/ExpandDims" -> "147 bert/encoder/layer_8/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; -"147 bert/encoder/layer_8/attention/self/sub" -> "148 bert/encoder/layer_8/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; -"148 bert/encoder/layer_8/attention/self/mul_1" -> "1157 bert/encoder/layer_8/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; -"149 bert/encoder/layer_7/attention/self/ExpandDims" -> "150 bert/encoder/layer_7/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; -"150 bert/encoder/layer_7/attention/self/sub" -> "151 bert/encoder/layer_7/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; -"151 bert/encoder/layer_7/attention/self/mul_1" -> "1067 bert/encoder/layer_7/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; -"152 bert/encoder/layer_6/attention/self/ExpandDims" -> "153 bert/encoder/layer_6/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; -"153 bert/encoder/layer_6/attention/self/sub" -> "154 bert/encoder/layer_6/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; -"154 bert/encoder/layer_6/attention/self/mul_1" -> "977 bert/encoder/layer_6/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; -"155 bert/encoder/layer_5/attention/self/ExpandDims" -> "156 bert/encoder/layer_5/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; -"156 bert/encoder/layer_5/attention/self/sub" -> "157 bert/encoder/layer_5/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; -"157 bert/encoder/layer_5/attention/self/mul_1" -> "887 bert/encoder/layer_5/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; -"158 bert/encoder/layer_4/attention/self/ExpandDims" -> "159 bert/encoder/layer_4/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; -"159 bert/encoder/layer_4/attention/self/sub" -> "160 bert/encoder/layer_4/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; -"160 bert/encoder/layer_4/attention/self/mul_1" -> "797 bert/encoder/layer_4/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; -"161 bert/encoder/layer_3/attention/self/ExpandDims" -> "162 bert/encoder/layer_3/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; -"162 bert/encoder/layer_3/attention/self/sub" -> "163 bert/encoder/layer_3/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; -"163 bert/encoder/layer_3/attention/self/mul_1" -> "707 bert/encoder/layer_3/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; -"164 bert/encoder/layer_2/attention/self/ExpandDims" -> "165 bert/encoder/layer_2/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; -"165 bert/encoder/layer_2/attention/self/sub" -> "166 bert/encoder/layer_2/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; -"166 bert/encoder/layer_2/attention/self/mul_1" -> "617 bert/encoder/layer_2/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; -"167 bert/encoder/layer_11/attention/self/ExpandDims" -> "168 bert/encoder/layer_11/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; -"168 bert/encoder/layer_11/attention/self/sub" -> "169 bert/encoder/layer_11/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; -"169 bert/encoder/layer_11/attention/self/mul_1" -> "1427 
bert/encoder/layer_11/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; -"170 bert/encoder/layer_10/attention/self/ExpandDims" -> "171 bert/encoder/layer_10/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; -"171 bert/encoder/layer_10/attention/self/sub" -> "172 bert/encoder/layer_10/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; -"172 bert/encoder/layer_10/attention/self/mul_1" -> "1337 bert/encoder/layer_10/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; -"173 bert/encoder/layer_1/attention/self/ExpandDims" -> "174 bert/encoder/layer_1/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; -"174 bert/encoder/layer_1/attention/self/sub" -> "175 bert/encoder/layer_1/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; -"175 bert/encoder/layer_1/attention/self/mul_1" -> "527 bert/encoder/layer_1/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; -"176 bert/encoder/layer_0/attention/self/ExpandDims" -> "177 bert/encoder/layer_0/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; -"177 bert/encoder/layer_0/attention/self/sub" -> "178 bert/encoder/layer_0/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; -"178 bert/encoder/layer_0/attention/self/mul_1" -> "437 bert/encoder/layer_0/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; -"179 bert/embeddings/Slice" -> "181 bert/embeddings/Reshape_4" [label="[256, 768]", style=solid]; -"180 bert/embeddings/Reshape_4__42" -> "181 bert/embeddings/Reshape_4" [label="[3]", style=dashed]; -"181 bert/embeddings/Reshape_4" -> "227 bert/embeddings/add_1" [label="[]", style=solid]; -"182 bert/embeddings/Reshape_3/shape_Unsqueeze__69" -> "209 bert/embeddings/Reshape_3/shape_Concat__70" [label="[1]", style=dashed]; -"183 bert/embeddings/Reshape_3/shape_Unsqueeze__68" -> "209 bert/embeddings/Reshape_3/shape_Concat__70" [label="[1]", style=dashed]; -"184 bert/embeddings/Reshape_2__43" -> "185 bert/embeddings/Reshape_2" [label="[1]", style=dashed]; -"185 bert/embeddings/Reshape_2" -> "219 bert/embeddings/one_hot" [label="[]", style=dashed]; -"186 bert/embeddings/Reshape_1/shape_Unsqueeze__57" -> "196 bert/embeddings/Reshape_1/shape_Concat__58" [label="[1]", style=dashed]; -"187 bert/embeddings/Reshape_1/shape_Unsqueeze__56" -> "196 bert/embeddings/Reshape_1/shape_Concat__58" [label="[1]", style=dashed]; -"188 bert/embeddings/Reshape__59" -> "198 bert/embeddings/Reshape" [label="[1]", style=dashed]; -"189 bert/embeddings/ExpandDims" -> "190 bert/embeddings/Shape" [label="[-1, 256, 1]", style=dashed]; -"189 bert/embeddings/ExpandDims" -> "198 bert/embeddings/Reshape" [label="[-1, 256, 1]", style=dashed]; -"190 bert/embeddings/Shape" -> "191 bert/embeddings/Shape__49" [label="[3]", style=dashed]; -"191 bert/embeddings/Shape__49" -> "192 bert/embeddings/strided_slice" [label="[3]", style=solid]; -"192 bert/embeddings/strided_slice" -> "193 bert/embeddings/strided_slice__53" [label="[1]", style=solid]; -"193 bert/embeddings/strided_slice__53" -> "194 bert/embeddings/strided_slice__54" [label="[]", style=solid]; -"194 bert/embeddings/strided_slice__54" -> "195 bert/embeddings/Reshape_1/shape_Unsqueeze__55" [label="[]", style=dashed]; -"195 bert/embeddings/Reshape_1/shape_Unsqueeze__55" -> "196 bert/embeddings/Reshape_1/shape_Concat__58" [label="[1]", style=dashed]; -"196 bert/embeddings/Reshape_1/shape_Concat__58" -> "197 bert/embeddings/Reshape_1__60" [label="[3]", style=dashed]; -"197 bert/embeddings/Reshape_1__60" -> "202 bert/embeddings/Reshape_1" 
[label="[3]", style=dashed]; -"198 bert/embeddings/Reshape" -> "201 bert/embeddings/GatherV2" [label="[]", style=dashed]; -"199 QuantizeLinear_bert/embeddings/word_embeddings^0_1" -> "200 DequantizeLinear_bert/embeddings/word_embeddings^0_1" [label="[30522, 768]", style=dashed]; -"200 DequantizeLinear_bert/embeddings/word_embeddings^0_1" -> "201 bert/embeddings/GatherV2" [label="[30522, 768]", style=solid]; -"201 bert/embeddings/GatherV2" -> "202 bert/embeddings/Reshape_1" [label="[]", style=solid]; -"202 bert/embeddings/Reshape_1" -> "203 bert/embeddings/Shape_1" [label="[]", style=solid]; -"202 bert/embeddings/Reshape_1" -> "226 bert/embeddings/add" [label="[]", style=solid]; -"203 bert/embeddings/Shape_1" -> "204 bert/embeddings/Shape_1__61" [label="[-1]", style=dashed]; -"204 bert/embeddings/Shape_1__61" -> "205 bert/embeddings/strided_slice_1" [label="[-1]", style=solid]; -"205 bert/embeddings/strided_slice_1" -> "206 bert/embeddings/strided_slice_1__65" [label="[-1]", style=solid]; -"206 bert/embeddings/strided_slice_1__65" -> "207 bert/embeddings/strided_slice_1__66" [label="[]", style=solid]; -"207 bert/embeddings/strided_slice_1__66" -> "208 bert/embeddings/Reshape_3/shape_Unsqueeze__67" [label="[]", style=dashed]; -"208 bert/embeddings/Reshape_3/shape_Unsqueeze__67" -> "209 bert/embeddings/Reshape_3/shape_Concat__70" [label="[1]", style=dashed]; -"209 bert/embeddings/Reshape_3/shape_Concat__70" -> "210 bert/embeddings/Reshape_3__71" [label="[3]", style=dashed]; -"210 bert/embeddings/Reshape_3__71" -> "225 bert/embeddings/Reshape_3" [label="[3]", style=dashed]; -"211 Unsqueeze__46" -> "218 Concat__47" [label="[1]", style=solid]; -"212 Unsqueeze__45" -> "218 Concat__47" [label="[1]", style=solid]; -"213 Unsqueeze__44" -> "219 bert/embeddings/one_hot" [label="[1]", style=dashed]; -"214 Reshape_1/shape_Unsqueeze__480" -> "1499 Reshape_1/shape_Concat__481" [label="[1]", style=dashed]; -"215 Reshape_1/shape_Unsqueeze__479" -> "1499 Reshape_1/shape_Concat__481" [label="[1]", style=dashed]; -"216 Reshape/shape_Unsqueeze__483" -> "1496 Reshape/shape_Concat__484" [label="[1]", style=dashed]; -"217 MatMul__486" -> "1502 QuantizeLinear_MatMul__486^0_1" [label="[768, 2]", style=solid]; -"218 Concat__47" -> "219 bert/embeddings/one_hot" [label="[2]", style=solid]; -"219 bert/embeddings/one_hot" -> "220 QuantizeLinear_bert/embeddings/one_hot^0_1" [label="[]", style=solid]; -"220 QuantizeLinear_bert/embeddings/one_hot^0_1" -> "221 DequantizeLinear_bert/embeddings/one_hot^0_1" [label="[]", style=dashed]; -"221 DequantizeLinear_bert/embeddings/one_hot^0_1" -> "224 bert/embeddings/MatMul" [label="[]", style=solid]; -"222 QuantizeLinear_bert/embeddings/token_type_embeddings^0_1" -> "223 DequantizeLinear_bert/embeddings/token_type_embeddings^0_1" [label="[2, 768]", style=dashed]; -"223 DequantizeLinear_bert/embeddings/token_type_embeddings^0_1" -> "224 bert/embeddings/MatMul" [label="[2, 768]", style=solid]; -"224 bert/embeddings/MatMul" -> "225 bert/embeddings/Reshape_3" [label="[]", style=solid]; -"225 bert/embeddings/Reshape_3" -> "226 bert/embeddings/add" [label="[]", style=solid]; -"226 bert/embeddings/add" -> "227 bert/embeddings/add_1" [label="[]", style=solid]; -"227 bert/embeddings/add_1" -> "228 bert/embeddings/LayerNorm/moments/mean" [label="[]", style=solid]; -"227 bert/embeddings/add_1" -> "230 bert/embeddings/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"227 bert/embeddings/add_1" -> "239 bert/embeddings/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"228 
bert/embeddings/LayerNorm/moments/mean" -> "229 bert/embeddings/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"228 bert/embeddings/LayerNorm/moments/mean" -> "237 bert/embeddings/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"229 bert/embeddings/LayerNorm/moments/StopGradient" -> "230 bert/embeddings/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"230 bert/embeddings/LayerNorm/moments/SquaredDifference" -> "231 bert/embeddings/LayerNorm/moments/SquaredDifference__72" [label="[]", style=solid]; -"231 bert/embeddings/LayerNorm/moments/SquaredDifference__72" -> "232 bert/embeddings/LayerNorm/moments/variance" [label="[]", style=solid]; -"232 bert/embeddings/LayerNorm/moments/variance" -> "233 bert/embeddings/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"233 bert/embeddings/LayerNorm/batchnorm/add" -> "234 bert/embeddings/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"234 bert/embeddings/LayerNorm/batchnorm/Rsqrt" -> "235 bert/embeddings/LayerNorm/batchnorm/Rsqrt__74" [label="[]", style=solid]; -"235 bert/embeddings/LayerNorm/batchnorm/Rsqrt__74" -> "236 bert/embeddings/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"236 bert/embeddings/LayerNorm/batchnorm/mul" -> "237 bert/embeddings/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"236 bert/embeddings/LayerNorm/batchnorm/mul" -> "239 bert/embeddings/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"237 bert/embeddings/LayerNorm/batchnorm/mul_2" -> "238 bert/embeddings/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"238 bert/embeddings/LayerNorm/batchnorm/sub" -> "240 bert/embeddings/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"239 bert/embeddings/LayerNorm/batchnorm/mul_1" -> "240 bert/embeddings/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"240 bert/embeddings/LayerNorm/batchnorm/add_1" -> "241 bert/encoder/Shape_2" [label="[]", style=solid]; -"240 bert/embeddings/LayerNorm/batchnorm/add_1" -> "405 bert/encoder/Reshape_1" [label="[]", style=solid]; -"241 bert/encoder/Shape_2" -> "242 bert/encoder/Shape_2__76" [label="[-1]", style=dashed]; -"242 bert/encoder/Shape_2__76" -> "243 bert/encoder/strided_slice_2" [label="[-1]", style=solid]; -"243 bert/encoder/strided_slice_2" -> "244 bert/encoder/strided_slice_2__80" [label="[-1]", style=solid]; -"244 bert/encoder/strided_slice_2__80" -> "245 bert/encoder/strided_slice_2__81" [label="[]", style=solid]; -"245 bert/encoder/strided_slice_2__81" -> "246 bert/encoder/layer_9/attention/self/mul_2" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "250 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__85" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "253 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__90" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "256 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__95" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "259 bert/encoder/layer_8/attention/self/mul_2" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "263 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__103" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "266 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__108" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "269 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__113" [label="[]", style=dashed]; -"245 
bert/encoder/strided_slice_2__81" -> "272 bert/encoder/layer_7/attention/self/mul_2" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "276 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__121" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "279 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__126" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "282 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__131" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "285 bert/encoder/layer_6/attention/self/mul_2" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "289 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__139" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "292 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__144" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "295 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__149" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "298 bert/encoder/layer_5/attention/self/mul_2" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "302 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__157" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "305 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__162" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "308 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__167" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "311 bert/encoder/layer_4/attention/self/mul_2" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "315 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__175" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "318 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__180" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "321 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__185" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "324 bert/encoder/layer_3/attention/self/mul_2" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "328 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__193" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "331 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__198" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "334 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__203" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "337 bert/encoder/layer_2/attention/self/mul_2" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "341 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__211" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "344 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__216" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "347 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__221" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "350 bert/encoder/layer_11/attention/self/mul_2" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "354 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__229" [label="[]", style=dashed]; 
-"245 bert/encoder/strided_slice_2__81" -> "357 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__234" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "360 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__239" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "363 bert/encoder/layer_10/attention/self/mul_2" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "367 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__247" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "370 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__252" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "373 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__257" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "376 bert/encoder/layer_1/attention/self/mul_2" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "380 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__265" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "383 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__270" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "386 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__275" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "389 bert/encoder/layer_0/attention/self/mul_2" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "393 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__283" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "396 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__288" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "399 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__293" [label="[]", style=dashed]; -"245 bert/encoder/strided_slice_2__81" -> "402 bert/encoder/Reshape_13/shape_Unsqueeze__298" [label="[]", style=dashed]; -"246 bert/encoder/layer_9/attention/self/mul_2" -> "247 bert/encoder/layer_9/attention/self/Reshape_3/shape_Unsqueeze__82" [label="[]", style=dashed]; -"247 bert/encoder/layer_9/attention/self/Reshape_3/shape_Unsqueeze__82" -> "248 bert/encoder/layer_9/attention/self/Reshape_3/shape_Concat__84" [label="[1]", style=dashed]; -"248 bert/encoder/layer_9/attention/self/Reshape_3/shape_Concat__84" -> "249 bert/encoder/layer_9/attention/self/Reshape_3__434" [label="[2]", style=dashed]; -"249 bert/encoder/layer_9/attention/self/Reshape_3__434" -> "1253 bert/encoder/layer_9/attention/self/Reshape_3" [label="[2]", style=dashed]; -"250 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__85" -> "251 bert/encoder/layer_9/attention/self/Reshape_2/shape_Concat__89" [label="[1]", style=dashed]; -"251 bert/encoder/layer_9/attention/self/Reshape_2/shape_Concat__89" -> "252 bert/encoder/layer_9/attention/self/Reshape_2__429" [label="[4]", style=dashed]; -"252 bert/encoder/layer_9/attention/self/Reshape_2__429" -> "1226 bert/encoder/layer_9/attention/self/Reshape_2" [label="[4]", style=dashed]; -"253 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__90" -> "254 bert/encoder/layer_9/attention/self/Reshape_1/shape_Concat__94" [label="[1]", style=dashed]; -"254 bert/encoder/layer_9/attention/self/Reshape_1/shape_Concat__94" -> "255 bert/encoder/layer_9/attention/self/Reshape_1__431" [label="[4]", style=dashed]; -"255 
bert/encoder/layer_9/attention/self/Reshape_1__431" -> "1242 bert/encoder/layer_9/attention/self/Reshape_1" [label="[4]", style=dashed]; -"256 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__95" -> "257 bert/encoder/layer_9/attention/self/Reshape/shape_Concat__99" [label="[1]", style=dashed]; -"257 bert/encoder/layer_9/attention/self/Reshape/shape_Concat__99" -> "258 bert/encoder/layer_9/attention/self/Reshape__430" [label="[4]", style=dashed]; -"258 bert/encoder/layer_9/attention/self/Reshape__430" -> "1234 bert/encoder/layer_9/attention/self/Reshape" [label="[4]", style=dashed]; -"259 bert/encoder/layer_8/attention/self/mul_2" -> "260 bert/encoder/layer_8/attention/self/Reshape_3/shape_Unsqueeze__100" [label="[]", style=dashed]; -"260 bert/encoder/layer_8/attention/self/Reshape_3/shape_Unsqueeze__100" -> "261 bert/encoder/layer_8/attention/self/Reshape_3/shape_Concat__102" [label="[1]", style=dashed]; -"261 bert/encoder/layer_8/attention/self/Reshape_3/shape_Concat__102" -> "262 bert/encoder/layer_8/attention/self/Reshape_3__420" [label="[2]", style=dashed]; -"262 bert/encoder/layer_8/attention/self/Reshape_3__420" -> "1163 bert/encoder/layer_8/attention/self/Reshape_3" [label="[2]", style=dashed]; -"263 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__103" -> "264 bert/encoder/layer_8/attention/self/Reshape_2/shape_Concat__107" [label="[1]", style=dashed]; -"264 bert/encoder/layer_8/attention/self/Reshape_2/shape_Concat__107" -> "265 bert/encoder/layer_8/attention/self/Reshape_2__415" [label="[4]", style=dashed]; -"265 bert/encoder/layer_8/attention/self/Reshape_2__415" -> "1136 bert/encoder/layer_8/attention/self/Reshape_2" [label="[4]", style=dashed]; -"266 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__108" -> "267 bert/encoder/layer_8/attention/self/Reshape_1/shape_Concat__112" [label="[1]", style=dashed]; -"267 bert/encoder/layer_8/attention/self/Reshape_1/shape_Concat__112" -> "268 bert/encoder/layer_8/attention/self/Reshape_1__417" [label="[4]", style=dashed]; -"268 bert/encoder/layer_8/attention/self/Reshape_1__417" -> "1152 bert/encoder/layer_8/attention/self/Reshape_1" [label="[4]", style=dashed]; -"269 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__113" -> "270 bert/encoder/layer_8/attention/self/Reshape/shape_Concat__117" [label="[1]", style=dashed]; -"270 bert/encoder/layer_8/attention/self/Reshape/shape_Concat__117" -> "271 bert/encoder/layer_8/attention/self/Reshape__416" [label="[4]", style=dashed]; -"271 bert/encoder/layer_8/attention/self/Reshape__416" -> "1144 bert/encoder/layer_8/attention/self/Reshape" [label="[4]", style=dashed]; -"272 bert/encoder/layer_7/attention/self/mul_2" -> "273 bert/encoder/layer_7/attention/self/Reshape_3/shape_Unsqueeze__118" [label="[]", style=dashed]; -"273 bert/encoder/layer_7/attention/self/Reshape_3/shape_Unsqueeze__118" -> "274 bert/encoder/layer_7/attention/self/Reshape_3/shape_Concat__120" [label="[1]", style=dashed]; -"274 bert/encoder/layer_7/attention/self/Reshape_3/shape_Concat__120" -> "275 bert/encoder/layer_7/attention/self/Reshape_3__406" [label="[2]", style=dashed]; -"275 bert/encoder/layer_7/attention/self/Reshape_3__406" -> "1073 bert/encoder/layer_7/attention/self/Reshape_3" [label="[2]", style=dashed]; -"276 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__121" -> "277 bert/encoder/layer_7/attention/self/Reshape_2/shape_Concat__125" [label="[1]", style=dashed]; -"277 bert/encoder/layer_7/attention/self/Reshape_2/shape_Concat__125" -> "278 
bert/encoder/layer_7/attention/self/Reshape_2__401" [label="[4]", style=dashed]; -"278 bert/encoder/layer_7/attention/self/Reshape_2__401" -> "1046 bert/encoder/layer_7/attention/self/Reshape_2" [label="[4]", style=dashed]; -"279 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__126" -> "280 bert/encoder/layer_7/attention/self/Reshape_1/shape_Concat__130" [label="[1]", style=dashed]; -"280 bert/encoder/layer_7/attention/self/Reshape_1/shape_Concat__130" -> "281 bert/encoder/layer_7/attention/self/Reshape_1__403" [label="[4]", style=dashed]; -"281 bert/encoder/layer_7/attention/self/Reshape_1__403" -> "1062 bert/encoder/layer_7/attention/self/Reshape_1" [label="[4]", style=dashed]; -"282 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__131" -> "283 bert/encoder/layer_7/attention/self/Reshape/shape_Concat__135" [label="[1]", style=dashed]; -"283 bert/encoder/layer_7/attention/self/Reshape/shape_Concat__135" -> "284 bert/encoder/layer_7/attention/self/Reshape__402" [label="[4]", style=dashed]; -"284 bert/encoder/layer_7/attention/self/Reshape__402" -> "1054 bert/encoder/layer_7/attention/self/Reshape" [label="[4]", style=dashed]; -"285 bert/encoder/layer_6/attention/self/mul_2" -> "286 bert/encoder/layer_6/attention/self/Reshape_3/shape_Unsqueeze__136" [label="[]", style=dashed]; -"286 bert/encoder/layer_6/attention/self/Reshape_3/shape_Unsqueeze__136" -> "287 bert/encoder/layer_6/attention/self/Reshape_3/shape_Concat__138" [label="[1]", style=dashed]; -"287 bert/encoder/layer_6/attention/self/Reshape_3/shape_Concat__138" -> "288 bert/encoder/layer_6/attention/self/Reshape_3__392" [label="[2]", style=dashed]; -"288 bert/encoder/layer_6/attention/self/Reshape_3__392" -> "983 bert/encoder/layer_6/attention/self/Reshape_3" [label="[2]", style=dashed]; -"289 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__139" -> "290 bert/encoder/layer_6/attention/self/Reshape_2/shape_Concat__143" [label="[1]", style=dashed]; -"290 bert/encoder/layer_6/attention/self/Reshape_2/shape_Concat__143" -> "291 bert/encoder/layer_6/attention/self/Reshape_2__387" [label="[4]", style=dashed]; -"291 bert/encoder/layer_6/attention/self/Reshape_2__387" -> "956 bert/encoder/layer_6/attention/self/Reshape_2" [label="[4]", style=dashed]; -"292 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__144" -> "293 bert/encoder/layer_6/attention/self/Reshape_1/shape_Concat__148" [label="[1]", style=dashed]; -"293 bert/encoder/layer_6/attention/self/Reshape_1/shape_Concat__148" -> "294 bert/encoder/layer_6/attention/self/Reshape_1__389" [label="[4]", style=dashed]; -"294 bert/encoder/layer_6/attention/self/Reshape_1__389" -> "972 bert/encoder/layer_6/attention/self/Reshape_1" [label="[4]", style=dashed]; -"295 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__149" -> "296 bert/encoder/layer_6/attention/self/Reshape/shape_Concat__153" [label="[1]", style=dashed]; -"296 bert/encoder/layer_6/attention/self/Reshape/shape_Concat__153" -> "297 bert/encoder/layer_6/attention/self/Reshape__388" [label="[4]", style=dashed]; -"297 bert/encoder/layer_6/attention/self/Reshape__388" -> "964 bert/encoder/layer_6/attention/self/Reshape" [label="[4]", style=dashed]; -"298 bert/encoder/layer_5/attention/self/mul_2" -> "299 bert/encoder/layer_5/attention/self/Reshape_3/shape_Unsqueeze__154" [label="[]", style=dashed]; -"299 bert/encoder/layer_5/attention/self/Reshape_3/shape_Unsqueeze__154" -> "300 bert/encoder/layer_5/attention/self/Reshape_3/shape_Concat__156" [label="[1]", style=dashed]; 
-"300 bert/encoder/layer_5/attention/self/Reshape_3/shape_Concat__156" -> "301 bert/encoder/layer_5/attention/self/Reshape_3__378" [label="[2]", style=dashed]; -"301 bert/encoder/layer_5/attention/self/Reshape_3__378" -> "893 bert/encoder/layer_5/attention/self/Reshape_3" [label="[2]", style=dashed]; -"302 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__157" -> "303 bert/encoder/layer_5/attention/self/Reshape_2/shape_Concat__161" [label="[1]", style=dashed]; -"303 bert/encoder/layer_5/attention/self/Reshape_2/shape_Concat__161" -> "304 bert/encoder/layer_5/attention/self/Reshape_2__373" [label="[4]", style=dashed]; -"304 bert/encoder/layer_5/attention/self/Reshape_2__373" -> "866 bert/encoder/layer_5/attention/self/Reshape_2" [label="[4]", style=dashed]; -"305 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__162" -> "306 bert/encoder/layer_5/attention/self/Reshape_1/shape_Concat__166" [label="[1]", style=dashed]; -"306 bert/encoder/layer_5/attention/self/Reshape_1/shape_Concat__166" -> "307 bert/encoder/layer_5/attention/self/Reshape_1__375" [label="[4]", style=dashed]; -"307 bert/encoder/layer_5/attention/self/Reshape_1__375" -> "882 bert/encoder/layer_5/attention/self/Reshape_1" [label="[4]", style=dashed]; -"308 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__167" -> "309 bert/encoder/layer_5/attention/self/Reshape/shape_Concat__171" [label="[1]", style=dashed]; -"309 bert/encoder/layer_5/attention/self/Reshape/shape_Concat__171" -> "310 bert/encoder/layer_5/attention/self/Reshape__374" [label="[4]", style=dashed]; -"310 bert/encoder/layer_5/attention/self/Reshape__374" -> "874 bert/encoder/layer_5/attention/self/Reshape" [label="[4]", style=dashed]; -"311 bert/encoder/layer_4/attention/self/mul_2" -> "312 bert/encoder/layer_4/attention/self/Reshape_3/shape_Unsqueeze__172" [label="[]", style=dashed]; -"312 bert/encoder/layer_4/attention/self/Reshape_3/shape_Unsqueeze__172" -> "313 bert/encoder/layer_4/attention/self/Reshape_3/shape_Concat__174" [label="[1]", style=dashed]; -"313 bert/encoder/layer_4/attention/self/Reshape_3/shape_Concat__174" -> "314 bert/encoder/layer_4/attention/self/Reshape_3__364" [label="[2]", style=dashed]; -"314 bert/encoder/layer_4/attention/self/Reshape_3__364" -> "803 bert/encoder/layer_4/attention/self/Reshape_3" [label="[2]", style=dashed]; -"315 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__175" -> "316 bert/encoder/layer_4/attention/self/Reshape_2/shape_Concat__179" [label="[1]", style=dashed]; -"316 bert/encoder/layer_4/attention/self/Reshape_2/shape_Concat__179" -> "317 bert/encoder/layer_4/attention/self/Reshape_2__359" [label="[4]", style=dashed]; -"317 bert/encoder/layer_4/attention/self/Reshape_2__359" -> "776 bert/encoder/layer_4/attention/self/Reshape_2" [label="[4]", style=dashed]; -"318 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__180" -> "319 bert/encoder/layer_4/attention/self/Reshape_1/shape_Concat__184" [label="[1]", style=dashed]; -"319 bert/encoder/layer_4/attention/self/Reshape_1/shape_Concat__184" -> "320 bert/encoder/layer_4/attention/self/Reshape_1__361" [label="[4]", style=dashed]; -"320 bert/encoder/layer_4/attention/self/Reshape_1__361" -> "792 bert/encoder/layer_4/attention/self/Reshape_1" [label="[4]", style=dashed]; -"321 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__185" -> "322 bert/encoder/layer_4/attention/self/Reshape/shape_Concat__189" [label="[1]", style=dashed]; -"322 bert/encoder/layer_4/attention/self/Reshape/shape_Concat__189" -> 
"323 bert/encoder/layer_4/attention/self/Reshape__360" [label="[4]", style=dashed]; -"323 bert/encoder/layer_4/attention/self/Reshape__360" -> "784 bert/encoder/layer_4/attention/self/Reshape" [label="[4]", style=dashed]; -"324 bert/encoder/layer_3/attention/self/mul_2" -> "325 bert/encoder/layer_3/attention/self/Reshape_3/shape_Unsqueeze__190" [label="[]", style=dashed]; -"325 bert/encoder/layer_3/attention/self/Reshape_3/shape_Unsqueeze__190" -> "326 bert/encoder/layer_3/attention/self/Reshape_3/shape_Concat__192" [label="[1]", style=dashed]; -"326 bert/encoder/layer_3/attention/self/Reshape_3/shape_Concat__192" -> "327 bert/encoder/layer_3/attention/self/Reshape_3__350" [label="[2]", style=dashed]; -"327 bert/encoder/layer_3/attention/self/Reshape_3__350" -> "713 bert/encoder/layer_3/attention/self/Reshape_3" [label="[2]", style=dashed]; -"328 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__193" -> "329 bert/encoder/layer_3/attention/self/Reshape_2/shape_Concat__197" [label="[1]", style=dashed]; -"329 bert/encoder/layer_3/attention/self/Reshape_2/shape_Concat__197" -> "330 bert/encoder/layer_3/attention/self/Reshape_2__345" [label="[4]", style=dashed]; -"330 bert/encoder/layer_3/attention/self/Reshape_2__345" -> "686 bert/encoder/layer_3/attention/self/Reshape_2" [label="[4]", style=dashed]; -"331 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__198" -> "332 bert/encoder/layer_3/attention/self/Reshape_1/shape_Concat__202" [label="[1]", style=dashed]; -"332 bert/encoder/layer_3/attention/self/Reshape_1/shape_Concat__202" -> "333 bert/encoder/layer_3/attention/self/Reshape_1__347" [label="[4]", style=dashed]; -"333 bert/encoder/layer_3/attention/self/Reshape_1__347" -> "702 bert/encoder/layer_3/attention/self/Reshape_1" [label="[4]", style=dashed]; -"334 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__203" -> "335 bert/encoder/layer_3/attention/self/Reshape/shape_Concat__207" [label="[1]", style=dashed]; -"335 bert/encoder/layer_3/attention/self/Reshape/shape_Concat__207" -> "336 bert/encoder/layer_3/attention/self/Reshape__346" [label="[4]", style=dashed]; -"336 bert/encoder/layer_3/attention/self/Reshape__346" -> "694 bert/encoder/layer_3/attention/self/Reshape" [label="[4]", style=dashed]; -"337 bert/encoder/layer_2/attention/self/mul_2" -> "338 bert/encoder/layer_2/attention/self/Reshape_3/shape_Unsqueeze__208" [label="[]", style=dashed]; -"338 bert/encoder/layer_2/attention/self/Reshape_3/shape_Unsqueeze__208" -> "339 bert/encoder/layer_2/attention/self/Reshape_3/shape_Concat__210" [label="[1]", style=dashed]; -"339 bert/encoder/layer_2/attention/self/Reshape_3/shape_Concat__210" -> "340 bert/encoder/layer_2/attention/self/Reshape_3__336" [label="[2]", style=dashed]; -"340 bert/encoder/layer_2/attention/self/Reshape_3__336" -> "623 bert/encoder/layer_2/attention/self/Reshape_3" [label="[2]", style=dashed]; -"341 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__211" -> "342 bert/encoder/layer_2/attention/self/Reshape_2/shape_Concat__215" [label="[1]", style=dashed]; -"342 bert/encoder/layer_2/attention/self/Reshape_2/shape_Concat__215" -> "343 bert/encoder/layer_2/attention/self/Reshape_2__331" [label="[4]", style=dashed]; -"343 bert/encoder/layer_2/attention/self/Reshape_2__331" -> "596 bert/encoder/layer_2/attention/self/Reshape_2" [label="[4]", style=dashed]; -"344 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__216" -> "345 bert/encoder/layer_2/attention/self/Reshape_1/shape_Concat__220" [label="[1]", 
style=dashed]; -"345 bert/encoder/layer_2/attention/self/Reshape_1/shape_Concat__220" -> "346 bert/encoder/layer_2/attention/self/Reshape_1__333" [label="[4]", style=dashed]; -"346 bert/encoder/layer_2/attention/self/Reshape_1__333" -> "612 bert/encoder/layer_2/attention/self/Reshape_1" [label="[4]", style=dashed]; -"347 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__221" -> "348 bert/encoder/layer_2/attention/self/Reshape/shape_Concat__225" [label="[1]", style=dashed]; -"348 bert/encoder/layer_2/attention/self/Reshape/shape_Concat__225" -> "349 bert/encoder/layer_2/attention/self/Reshape__332" [label="[4]", style=dashed]; -"349 bert/encoder/layer_2/attention/self/Reshape__332" -> "604 bert/encoder/layer_2/attention/self/Reshape" [label="[4]", style=dashed]; -"350 bert/encoder/layer_11/attention/self/mul_2" -> "351 bert/encoder/layer_11/attention/self/Reshape_3/shape_Unsqueeze__226" [label="[]", style=dashed]; -"351 bert/encoder/layer_11/attention/self/Reshape_3/shape_Unsqueeze__226" -> "352 bert/encoder/layer_11/attention/self/Reshape_3/shape_Concat__228" [label="[1]", style=dashed]; -"352 bert/encoder/layer_11/attention/self/Reshape_3/shape_Concat__228" -> "353 bert/encoder/layer_11/attention/self/Reshape_3__462" [label="[2]", style=dashed]; -"353 bert/encoder/layer_11/attention/self/Reshape_3__462" -> "1433 bert/encoder/layer_11/attention/self/Reshape_3" [label="[2]", style=dashed]; -"354 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__229" -> "355 bert/encoder/layer_11/attention/self/Reshape_2/shape_Concat__233" [label="[1]", style=dashed]; -"355 bert/encoder/layer_11/attention/self/Reshape_2/shape_Concat__233" -> "356 bert/encoder/layer_11/attention/self/Reshape_2__457" [label="[4]", style=dashed]; -"356 bert/encoder/layer_11/attention/self/Reshape_2__457" -> "1406 bert/encoder/layer_11/attention/self/Reshape_2" [label="[4]", style=dashed]; -"357 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__234" -> "358 bert/encoder/layer_11/attention/self/Reshape_1/shape_Concat__238" [label="[1]", style=dashed]; -"358 bert/encoder/layer_11/attention/self/Reshape_1/shape_Concat__238" -> "359 bert/encoder/layer_11/attention/self/Reshape_1__459" [label="[4]", style=dashed]; -"359 bert/encoder/layer_11/attention/self/Reshape_1__459" -> "1422 bert/encoder/layer_11/attention/self/Reshape_1" [label="[4]", style=dashed]; -"360 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__239" -> "361 bert/encoder/layer_11/attention/self/Reshape/shape_Concat__243" [label="[1]", style=dashed]; -"361 bert/encoder/layer_11/attention/self/Reshape/shape_Concat__243" -> "362 bert/encoder/layer_11/attention/self/Reshape__458" [label="[4]", style=dashed]; -"362 bert/encoder/layer_11/attention/self/Reshape__458" -> "1414 bert/encoder/layer_11/attention/self/Reshape" [label="[4]", style=dashed]; -"363 bert/encoder/layer_10/attention/self/mul_2" -> "364 bert/encoder/layer_10/attention/self/Reshape_3/shape_Unsqueeze__244" [label="[]", style=dashed]; -"364 bert/encoder/layer_10/attention/self/Reshape_3/shape_Unsqueeze__244" -> "365 bert/encoder/layer_10/attention/self/Reshape_3/shape_Concat__246" [label="[1]", style=dashed]; -"365 bert/encoder/layer_10/attention/self/Reshape_3/shape_Concat__246" -> "366 bert/encoder/layer_10/attention/self/Reshape_3__448" [label="[2]", style=dashed]; -"366 bert/encoder/layer_10/attention/self/Reshape_3__448" -> "1343 bert/encoder/layer_10/attention/self/Reshape_3" [label="[2]", style=dashed]; -"367 
bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__247" -> "368 bert/encoder/layer_10/attention/self/Reshape_2/shape_Concat__251" [label="[1]", style=dashed]; -"368 bert/encoder/layer_10/attention/self/Reshape_2/shape_Concat__251" -> "369 bert/encoder/layer_10/attention/self/Reshape_2__443" [label="[4]", style=dashed]; -"369 bert/encoder/layer_10/attention/self/Reshape_2__443" -> "1316 bert/encoder/layer_10/attention/self/Reshape_2" [label="[4]", style=dashed]; -"370 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__252" -> "371 bert/encoder/layer_10/attention/self/Reshape_1/shape_Concat__256" [label="[1]", style=dashed]; -"371 bert/encoder/layer_10/attention/self/Reshape_1/shape_Concat__256" -> "372 bert/encoder/layer_10/attention/self/Reshape_1__445" [label="[4]", style=dashed]; -"372 bert/encoder/layer_10/attention/self/Reshape_1__445" -> "1332 bert/encoder/layer_10/attention/self/Reshape_1" [label="[4]", style=dashed]; -"373 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__257" -> "374 bert/encoder/layer_10/attention/self/Reshape/shape_Concat__261" [label="[1]", style=dashed]; -"374 bert/encoder/layer_10/attention/self/Reshape/shape_Concat__261" -> "375 bert/encoder/layer_10/attention/self/Reshape__444" [label="[4]", style=dashed]; -"375 bert/encoder/layer_10/attention/self/Reshape__444" -> "1324 bert/encoder/layer_10/attention/self/Reshape" [label="[4]", style=dashed]; -"376 bert/encoder/layer_1/attention/self/mul_2" -> "377 bert/encoder/layer_1/attention/self/Reshape_3/shape_Unsqueeze__262" [label="[]", style=dashed]; -"377 bert/encoder/layer_1/attention/self/Reshape_3/shape_Unsqueeze__262" -> "378 bert/encoder/layer_1/attention/self/Reshape_3/shape_Concat__264" [label="[1]", style=dashed]; -"378 bert/encoder/layer_1/attention/self/Reshape_3/shape_Concat__264" -> "379 bert/encoder/layer_1/attention/self/Reshape_3__322" [label="[2]", style=dashed]; -"379 bert/encoder/layer_1/attention/self/Reshape_3__322" -> "533 bert/encoder/layer_1/attention/self/Reshape_3" [label="[2]", style=dashed]; -"380 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__265" -> "381 bert/encoder/layer_1/attention/self/Reshape_2/shape_Concat__269" [label="[1]", style=dashed]; -"381 bert/encoder/layer_1/attention/self/Reshape_2/shape_Concat__269" -> "382 bert/encoder/layer_1/attention/self/Reshape_2__317" [label="[4]", style=dashed]; -"382 bert/encoder/layer_1/attention/self/Reshape_2__317" -> "506 bert/encoder/layer_1/attention/self/Reshape_2" [label="[4]", style=dashed]; -"383 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__270" -> "384 bert/encoder/layer_1/attention/self/Reshape_1/shape_Concat__274" [label="[1]", style=dashed]; -"384 bert/encoder/layer_1/attention/self/Reshape_1/shape_Concat__274" -> "385 bert/encoder/layer_1/attention/self/Reshape_1__319" [label="[4]", style=dashed]; -"385 bert/encoder/layer_1/attention/self/Reshape_1__319" -> "522 bert/encoder/layer_1/attention/self/Reshape_1" [label="[4]", style=dashed]; -"386 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__275" -> "387 bert/encoder/layer_1/attention/self/Reshape/shape_Concat__279" [label="[1]", style=dashed]; -"387 bert/encoder/layer_1/attention/self/Reshape/shape_Concat__279" -> "388 bert/encoder/layer_1/attention/self/Reshape__318" [label="[4]", style=dashed]; -"388 bert/encoder/layer_1/attention/self/Reshape__318" -> "514 bert/encoder/layer_1/attention/self/Reshape" [label="[4]", style=dashed]; -"389 bert/encoder/layer_0/attention/self/mul_2" -> "390 
bert/encoder/layer_0/attention/self/Reshape_3/shape_Unsqueeze__280" [label="[]", style=dashed]; -"390 bert/encoder/layer_0/attention/self/Reshape_3/shape_Unsqueeze__280" -> "391 bert/encoder/layer_0/attention/self/Reshape_3/shape_Concat__282" [label="[1]", style=dashed]; -"391 bert/encoder/layer_0/attention/self/Reshape_3/shape_Concat__282" -> "392 bert/encoder/layer_0/attention/self/Reshape_3__308" [label="[2]", style=dashed]; -"392 bert/encoder/layer_0/attention/self/Reshape_3__308" -> "443 bert/encoder/layer_0/attention/self/Reshape_3" [label="[2]", style=dashed]; -"393 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__283" -> "394 bert/encoder/layer_0/attention/self/Reshape_2/shape_Concat__287" [label="[1]", style=dashed]; -"394 bert/encoder/layer_0/attention/self/Reshape_2/shape_Concat__287" -> "395 bert/encoder/layer_0/attention/self/Reshape_2__303" [label="[4]", style=dashed]; -"395 bert/encoder/layer_0/attention/self/Reshape_2__303" -> "416 bert/encoder/layer_0/attention/self/Reshape_2" [label="[4]", style=dashed]; -"396 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__288" -> "397 bert/encoder/layer_0/attention/self/Reshape_1/shape_Concat__292" [label="[1]", style=dashed]; -"397 bert/encoder/layer_0/attention/self/Reshape_1/shape_Concat__292" -> "398 bert/encoder/layer_0/attention/self/Reshape_1__305" [label="[4]", style=dashed]; -"398 bert/encoder/layer_0/attention/self/Reshape_1__305" -> "432 bert/encoder/layer_0/attention/self/Reshape_1" [label="[4]", style=dashed]; -"399 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__293" -> "400 bert/encoder/layer_0/attention/self/Reshape/shape_Concat__297" [label="[1]", style=dashed]; -"400 bert/encoder/layer_0/attention/self/Reshape/shape_Concat__297" -> "401 bert/encoder/layer_0/attention/self/Reshape__304" [label="[4]", style=dashed]; -"401 bert/encoder/layer_0/attention/self/Reshape__304" -> "424 bert/encoder/layer_0/attention/self/Reshape" [label="[4]", style=dashed]; -"402 bert/encoder/Reshape_13/shape_Unsqueeze__298" -> "403 bert/encoder/Reshape_13/shape_Concat__301" [label="[1]", style=dashed]; -"403 bert/encoder/Reshape_13/shape_Concat__301" -> "404 bert/encoder/Reshape_13__471" [label="[3]", style=dashed]; -"404 bert/encoder/Reshape_13__471" -> "1488 bert/encoder/Reshape_13" [label="[3]", style=dashed]; -"405 bert/encoder/Reshape_1" -> "406 QuantizeLinear_bert/encoder/Reshape_1^0_1" [label="[]", style=solid]; -"405 bert/encoder/Reshape_1" -> "410 QuantizeLinear_bert/encoder/Reshape_1^0_2" [label="[]", style=solid]; -"405 bert/encoder/Reshape_1" -> "412 QuantizeLinear_bert/encoder/Reshape_1^0_3" [label="[]", style=solid]; -"405 bert/encoder/Reshape_1" -> "448 bert/encoder/layer_0/attention/output/add" [label="[]", style=solid]; -"406 QuantizeLinear_bert/encoder/Reshape_1^0_1" -> "407 DequantizeLinear_bert/encoder/Reshape_1^0_1" [label="[]", style=dashed]; -"407 DequantizeLinear_bert/encoder/Reshape_1^0_1" -> "414 bert/encoder/layer_0/attention/self/value/MatMul" [label="[]", style=solid]; -"408 QuantizeLinear_bert/encoder/layer_0/attention/self/value/kernel^0_1" -> "409 DequantizeLinear_bert/encoder/layer_0/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; -"409 DequantizeLinear_bert/encoder/layer_0/attention/self/value/kernel^0_1" -> "414 bert/encoder/layer_0/attention/self/value/MatMul" [label="[768, 768]", style=solid]; -"410 QuantizeLinear_bert/encoder/Reshape_1^0_2" -> "411 DequantizeLinear_bert/encoder/Reshape_1^0_2" [label="[]", style=dashed]; -"411 
DequantizeLinear_bert/encoder/Reshape_1^0_2" -> "420 bert/encoder/layer_0/attention/self/query/MatMul" [label="[]", style=solid]; -"412 QuantizeLinear_bert/encoder/Reshape_1^0_3" -> "413 DequantizeLinear_bert/encoder/Reshape_1^0_3" [label="[]", style=dashed]; -"413 DequantizeLinear_bert/encoder/Reshape_1^0_3" -> "428 bert/encoder/layer_0/attention/self/key/MatMul" [label="[]", style=solid]; -"414 bert/encoder/layer_0/attention/self/value/MatMul" -> "415 bert/encoder/layer_0/attention/self/value/BiasAdd" [label="[]", style=solid]; -"415 bert/encoder/layer_0/attention/self/value/BiasAdd" -> "416 bert/encoder/layer_0/attention/self/Reshape_2" [label="[]", style=solid]; -"416 bert/encoder/layer_0/attention/self/Reshape_2" -> "417 bert/encoder/layer_0/attention/self/transpose_2" [label="[]", style=solid]; -"417 bert/encoder/layer_0/attention/self/transpose_2" -> "439 bert/encoder/layer_0/attention/self/MatMul_1" [label="[]", style=solid]; -"418 QuantizeLinear_bert/encoder/layer_0/attention/self/query/kernel^0_1" -> "419 DequantizeLinear_bert/encoder/layer_0/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; -"419 DequantizeLinear_bert/encoder/layer_0/attention/self/query/kernel^0_1" -> "420 bert/encoder/layer_0/attention/self/query/MatMul" [label="[768, 768]", style=solid]; -"420 bert/encoder/layer_0/attention/self/query/MatMul" -> "421 bert/encoder/layer_0/attention/self/query/BiasAdd" [label="[]", style=solid]; -"421 bert/encoder/layer_0/attention/self/query/BiasAdd" -> "422 QuantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; -"422 QuantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd^0_1" -> "423 DequantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; -"423 DequantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd^0_1" -> "424 bert/encoder/layer_0/attention/self/Reshape" [label="[]", style=solid]; -"424 bert/encoder/layer_0/attention/self/Reshape" -> "425 bert/encoder/layer_0/attention/self/transpose" [label="[]", style=solid]; -"425 bert/encoder/layer_0/attention/self/transpose" -> "435 bert/encoder/layer_0/attention/self/MatMul" [label="[]", style=solid]; -"426 QuantizeLinear_bert/encoder/layer_0/attention/self/key/kernel^0_1" -> "427 DequantizeLinear_bert/encoder/layer_0/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; -"427 DequantizeLinear_bert/encoder/layer_0/attention/self/key/kernel^0_1" -> "428 bert/encoder/layer_0/attention/self/key/MatMul" [label="[768, 768]", style=solid]; -"428 bert/encoder/layer_0/attention/self/key/MatMul" -> "429 bert/encoder/layer_0/attention/self/key/BiasAdd" [label="[]", style=solid]; -"429 bert/encoder/layer_0/attention/self/key/BiasAdd" -> "430 QuantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; -"430 QuantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd^0_1" -> "431 DequantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; -"431 DequantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd^0_1" -> "432 bert/encoder/layer_0/attention/self/Reshape_1" [label="[]", style=solid]; -"432 bert/encoder/layer_0/attention/self/Reshape_1" -> "433 bert/encoder/layer_0/attention/self/transpose_1" [label="[]", style=solid]; -"433 bert/encoder/layer_0/attention/self/transpose_1" -> "434 bert/encoder/layer_0/attention/self/MatMul__306" [label="[]", style=solid]; -"434 bert/encoder/layer_0/attention/self/MatMul__306" -> "435 
bert/encoder/layer_0/attention/self/MatMul" [label="[]", style=solid]; -"435 bert/encoder/layer_0/attention/self/MatMul" -> "436 bert/encoder/layer_0/attention/self/Mul" [label="[]", style=solid]; -"436 bert/encoder/layer_0/attention/self/Mul" -> "437 bert/encoder/layer_0/attention/self/add" [label="[]", style=solid]; -"437 bert/encoder/layer_0/attention/self/add" -> "438 bert/encoder/layer_0/attention/self/Softmax" [label="[]", style=solid]; -"438 bert/encoder/layer_0/attention/self/Softmax" -> "439 bert/encoder/layer_0/attention/self/MatMul_1" [label="[]", style=solid]; -"439 bert/encoder/layer_0/attention/self/MatMul_1" -> "440 QuantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1^0_1" [label="[]", style=solid]; -"440 QuantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1^0_1" -> "441 DequantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; -"441 DequantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1^0_1" -> "442 bert/encoder/layer_0/attention/self/transpose_3" [label="[]", style=solid]; -"442 bert/encoder/layer_0/attention/self/transpose_3" -> "443 bert/encoder/layer_0/attention/self/Reshape_3" [label="[]", style=solid]; -"443 bert/encoder/layer_0/attention/self/Reshape_3" -> "446 bert/encoder/layer_0/attention/output/dense/MatMul" [label="[]", style=solid]; -"444 QuantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel^0_1" -> "445 DequantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; -"445 DequantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel^0_1" -> "446 bert/encoder/layer_0/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; -"446 bert/encoder/layer_0/attention/output/dense/MatMul" -> "447 bert/encoder/layer_0/attention/output/dense/BiasAdd" [label="[]", style=solid]; -"447 bert/encoder/layer_0/attention/output/dense/BiasAdd" -> "448 bert/encoder/layer_0/attention/output/add" [label="[]", style=solid]; -"448 bert/encoder/layer_0/attention/output/add" -> "449 bert/encoder/layer_0/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"448 bert/encoder/layer_0/attention/output/add" -> "451 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"448 bert/encoder/layer_0/attention/output/add" -> "460 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"449 bert/encoder/layer_0/attention/output/LayerNorm/moments/mean" -> "450 bert/encoder/layer_0/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"449 bert/encoder/layer_0/attention/output/LayerNorm/moments/mean" -> "458 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"450 bert/encoder/layer_0/attention/output/LayerNorm/moments/StopGradient" -> "451 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"451 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference" -> "452 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference__309" [label="[]", style=solid]; -"452 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference__309" -> "453 bert/encoder/layer_0/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"453 bert/encoder/layer_0/attention/output/LayerNorm/moments/variance" -> "454 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"454 
bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add" -> "455 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"455 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/Rsqrt" -> "456 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/Rsqrt__311" [label="[]", style=solid]; -"456 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/Rsqrt__311" -> "457 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"457 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul" -> "458 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"457 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul" -> "460 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"458 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_2" -> "459 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"459 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/sub" -> "461 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"460 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_1" -> "461 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"461 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1" -> "462 QuantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"461 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1" -> "482 bert/encoder/layer_0/output/add" [label="[]", style=solid]; -"462 QuantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "463 DequantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"463 DequantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "466 bert/encoder/layer_0/intermediate/dense/MatMul" [label="[]", style=solid]; -"464 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel^0_1" -> "465 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; -"465 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel^0_1" -> "466 bert/encoder/layer_0/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; -"466 bert/encoder/layer_0/intermediate/dense/MatMul" -> "467 bert/encoder/layer_0/intermediate/dense/BiasAdd" [label="[]", style=solid]; -"467 bert/encoder/layer_0/intermediate/dense/BiasAdd" -> "468 bert/encoder/layer_0/intermediate/dense/Pow" [label="[]", style=solid]; -"467 bert/encoder/layer_0/intermediate/dense/BiasAdd" -> "470 bert/encoder/layer_0/intermediate/dense/add" [label="[]", style=solid]; -"467 bert/encoder/layer_0/intermediate/dense/BiasAdd" -> "475 bert/encoder/layer_0/intermediate/dense/mul_3" [label="[]", style=solid]; -"468 bert/encoder/layer_0/intermediate/dense/Pow" -> "469 bert/encoder/layer_0/intermediate/dense/mul" [label="[]", style=solid]; -"469 bert/encoder/layer_0/intermediate/dense/mul" -> "470 bert/encoder/layer_0/intermediate/dense/add" [label="[]", style=solid]; -"470 bert/encoder/layer_0/intermediate/dense/add" -> "471 bert/encoder/layer_0/intermediate/dense/mul_1" [label="[]", style=solid]; -"471 bert/encoder/layer_0/intermediate/dense/mul_1" -> "472 bert/encoder/layer_0/intermediate/dense/Tanh" [label="[]", style=solid]; -"472 
bert/encoder/layer_0/intermediate/dense/Tanh" -> "473 bert/encoder/layer_0/intermediate/dense/add_1" [label="[]", style=solid]; -"473 bert/encoder/layer_0/intermediate/dense/add_1" -> "474 bert/encoder/layer_0/intermediate/dense/mul_2" [label="[]", style=solid]; -"474 bert/encoder/layer_0/intermediate/dense/mul_2" -> "475 bert/encoder/layer_0/intermediate/dense/mul_3" [label="[]", style=solid]; -"475 bert/encoder/layer_0/intermediate/dense/mul_3" -> "476 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; -"476 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3^0_1" -> "477 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; -"477 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3^0_1" -> "480 bert/encoder/layer_0/output/dense/MatMul" [label="[]", style=solid]; -"478 QuantizeLinear_bert/encoder/layer_0/output/dense/kernel^0_1" -> "479 DequantizeLinear_bert/encoder/layer_0/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; -"479 DequantizeLinear_bert/encoder/layer_0/output/dense/kernel^0_1" -> "480 bert/encoder/layer_0/output/dense/MatMul" [label="[3072, 768]", style=solid]; -"480 bert/encoder/layer_0/output/dense/MatMul" -> "481 bert/encoder/layer_0/output/dense/BiasAdd" [label="[]", style=solid]; -"481 bert/encoder/layer_0/output/dense/BiasAdd" -> "482 bert/encoder/layer_0/output/add" [label="[]", style=solid]; -"482 bert/encoder/layer_0/output/add" -> "483 bert/encoder/layer_0/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"482 bert/encoder/layer_0/output/add" -> "485 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"482 bert/encoder/layer_0/output/add" -> "494 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"483 bert/encoder/layer_0/output/LayerNorm/moments/mean" -> "484 bert/encoder/layer_0/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"483 bert/encoder/layer_0/output/LayerNorm/moments/mean" -> "492 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"484 bert/encoder/layer_0/output/LayerNorm/moments/StopGradient" -> "485 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"485 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference" -> "486 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference__313" [label="[]", style=solid]; -"486 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference__313" -> "487 bert/encoder/layer_0/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"487 bert/encoder/layer_0/output/LayerNorm/moments/variance" -> "488 bert/encoder/layer_0/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"488 bert/encoder/layer_0/output/LayerNorm/batchnorm/add" -> "489 bert/encoder/layer_0/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"489 bert/encoder/layer_0/output/LayerNorm/batchnorm/Rsqrt" -> "490 bert/encoder/layer_0/output/LayerNorm/batchnorm/Rsqrt__315" [label="[]", style=solid]; -"490 bert/encoder/layer_0/output/LayerNorm/batchnorm/Rsqrt__315" -> "491 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"491 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul" -> "492 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"491 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul" -> "494 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_1" [label="[]", 
style=solid]; -"492 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_2" -> "493 bert/encoder/layer_0/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"493 bert/encoder/layer_0/output/LayerNorm/batchnorm/sub" -> "495 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"494 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_1" -> "495 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"495 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" -> "496 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"495 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" -> "500 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; -"495 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" -> "502 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; -"495 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" -> "538 bert/encoder/layer_1/attention/output/add" [label="[]", style=solid]; -"496 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_1" -> "497 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"497 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_1" -> "504 bert/encoder/layer_1/attention/self/value/MatMul" [label="[]", style=solid]; -"498 QuantizeLinear_bert/encoder/layer_1/attention/self/value/kernel^0_1" -> "499 DequantizeLinear_bert/encoder/layer_1/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; -"499 DequantizeLinear_bert/encoder/layer_1/attention/self/value/kernel^0_1" -> "504 bert/encoder/layer_1/attention/self/value/MatMul" [label="[768, 768]", style=solid]; -"500 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_2" -> "501 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; -"501 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_2" -> "510 bert/encoder/layer_1/attention/self/query/MatMul" [label="[]", style=solid]; -"502 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_3" -> "503 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; -"503 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_3" -> "518 bert/encoder/layer_1/attention/self/key/MatMul" [label="[]", style=solid]; -"504 bert/encoder/layer_1/attention/self/value/MatMul" -> "505 bert/encoder/layer_1/attention/self/value/BiasAdd" [label="[]", style=solid]; -"505 bert/encoder/layer_1/attention/self/value/BiasAdd" -> "506 bert/encoder/layer_1/attention/self/Reshape_2" [label="[]", style=solid]; -"506 bert/encoder/layer_1/attention/self/Reshape_2" -> "507 bert/encoder/layer_1/attention/self/transpose_2" [label="[]", style=solid]; -"507 bert/encoder/layer_1/attention/self/transpose_2" -> "529 bert/encoder/layer_1/attention/self/MatMul_1" [label="[]", style=solid]; -"508 QuantizeLinear_bert/encoder/layer_1/attention/self/query/kernel^0_1" -> "509 DequantizeLinear_bert/encoder/layer_1/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; -"509 DequantizeLinear_bert/encoder/layer_1/attention/self/query/kernel^0_1" -> "510 bert/encoder/layer_1/attention/self/query/MatMul" [label="[768, 768]", style=solid]; -"510 bert/encoder/layer_1/attention/self/query/MatMul" -> "511 
bert/encoder/layer_1/attention/self/query/BiasAdd" [label="[]", style=solid]; -"511 bert/encoder/layer_1/attention/self/query/BiasAdd" -> "512 QuantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; -"512 QuantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd^0_1" -> "513 DequantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; -"513 DequantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd^0_1" -> "514 bert/encoder/layer_1/attention/self/Reshape" [label="[]", style=solid]; -"514 bert/encoder/layer_1/attention/self/Reshape" -> "515 bert/encoder/layer_1/attention/self/transpose" [label="[]", style=solid]; -"515 bert/encoder/layer_1/attention/self/transpose" -> "525 bert/encoder/layer_1/attention/self/MatMul" [label="[]", style=solid]; -"516 QuantizeLinear_bert/encoder/layer_1/attention/self/key/kernel^0_1" -> "517 DequantizeLinear_bert/encoder/layer_1/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; -"517 DequantizeLinear_bert/encoder/layer_1/attention/self/key/kernel^0_1" -> "518 bert/encoder/layer_1/attention/self/key/MatMul" [label="[768, 768]", style=solid]; -"518 bert/encoder/layer_1/attention/self/key/MatMul" -> "519 bert/encoder/layer_1/attention/self/key/BiasAdd" [label="[]", style=solid]; -"519 bert/encoder/layer_1/attention/self/key/BiasAdd" -> "520 QuantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; -"520 QuantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd^0_1" -> "521 DequantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; -"521 DequantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd^0_1" -> "522 bert/encoder/layer_1/attention/self/Reshape_1" [label="[]", style=solid]; -"522 bert/encoder/layer_1/attention/self/Reshape_1" -> "523 bert/encoder/layer_1/attention/self/transpose_1" [label="[]", style=solid]; -"523 bert/encoder/layer_1/attention/self/transpose_1" -> "524 bert/encoder/layer_1/attention/self/MatMul__320" [label="[]", style=solid]; -"524 bert/encoder/layer_1/attention/self/MatMul__320" -> "525 bert/encoder/layer_1/attention/self/MatMul" [label="[]", style=solid]; -"525 bert/encoder/layer_1/attention/self/MatMul" -> "526 bert/encoder/layer_1/attention/self/Mul" [label="[]", style=solid]; -"526 bert/encoder/layer_1/attention/self/Mul" -> "527 bert/encoder/layer_1/attention/self/add" [label="[]", style=solid]; -"527 bert/encoder/layer_1/attention/self/add" -> "528 bert/encoder/layer_1/attention/self/Softmax" [label="[]", style=solid]; -"528 bert/encoder/layer_1/attention/self/Softmax" -> "529 bert/encoder/layer_1/attention/self/MatMul_1" [label="[]", style=solid]; -"529 bert/encoder/layer_1/attention/self/MatMul_1" -> "530 QuantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1^0_1" [label="[]", style=solid]; -"530 QuantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1^0_1" -> "531 DequantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; -"531 DequantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1^0_1" -> "532 bert/encoder/layer_1/attention/self/transpose_3" [label="[]", style=solid]; -"532 bert/encoder/layer_1/attention/self/transpose_3" -> "533 bert/encoder/layer_1/attention/self/Reshape_3" [label="[]", style=solid]; -"533 bert/encoder/layer_1/attention/self/Reshape_3" -> "536 bert/encoder/layer_1/attention/output/dense/MatMul" [label="[]", style=solid]; -"534 
QuantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel^0_1" -> "535 DequantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; -"535 DequantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel^0_1" -> "536 bert/encoder/layer_1/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; -"536 bert/encoder/layer_1/attention/output/dense/MatMul" -> "537 bert/encoder/layer_1/attention/output/dense/BiasAdd" [label="[]", style=solid]; -"537 bert/encoder/layer_1/attention/output/dense/BiasAdd" -> "538 bert/encoder/layer_1/attention/output/add" [label="[]", style=solid]; -"538 bert/encoder/layer_1/attention/output/add" -> "539 bert/encoder/layer_1/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"538 bert/encoder/layer_1/attention/output/add" -> "541 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"538 bert/encoder/layer_1/attention/output/add" -> "550 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"539 bert/encoder/layer_1/attention/output/LayerNorm/moments/mean" -> "540 bert/encoder/layer_1/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"539 bert/encoder/layer_1/attention/output/LayerNorm/moments/mean" -> "548 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"540 bert/encoder/layer_1/attention/output/LayerNorm/moments/StopGradient" -> "541 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"541 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference" -> "542 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference__323" [label="[]", style=solid]; -"542 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference__323" -> "543 bert/encoder/layer_1/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"543 bert/encoder/layer_1/attention/output/LayerNorm/moments/variance" -> "544 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"544 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add" -> "545 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"545 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/Rsqrt" -> "546 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/Rsqrt__325" [label="[]", style=solid]; -"546 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/Rsqrt__325" -> "547 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"547 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul" -> "548 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"547 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul" -> "550 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"548 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_2" -> "549 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"549 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/sub" -> "551 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"550 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_1" -> "551 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"551 
bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1" -> "552 QuantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"551 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1" -> "572 bert/encoder/layer_1/output/add" [label="[]", style=solid]; -"552 QuantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "553 DequantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"553 DequantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "556 bert/encoder/layer_1/intermediate/dense/MatMul" [label="[]", style=solid]; -"554 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel^0_1" -> "555 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; -"555 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel^0_1" -> "556 bert/encoder/layer_1/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; -"556 bert/encoder/layer_1/intermediate/dense/MatMul" -> "557 bert/encoder/layer_1/intermediate/dense/BiasAdd" [label="[]", style=solid]; -"557 bert/encoder/layer_1/intermediate/dense/BiasAdd" -> "558 bert/encoder/layer_1/intermediate/dense/Pow" [label="[]", style=solid]; -"557 bert/encoder/layer_1/intermediate/dense/BiasAdd" -> "560 bert/encoder/layer_1/intermediate/dense/add" [label="[]", style=solid]; -"557 bert/encoder/layer_1/intermediate/dense/BiasAdd" -> "565 bert/encoder/layer_1/intermediate/dense/mul_3" [label="[]", style=solid]; -"558 bert/encoder/layer_1/intermediate/dense/Pow" -> "559 bert/encoder/layer_1/intermediate/dense/mul" [label="[]", style=solid]; -"559 bert/encoder/layer_1/intermediate/dense/mul" -> "560 bert/encoder/layer_1/intermediate/dense/add" [label="[]", style=solid]; -"560 bert/encoder/layer_1/intermediate/dense/add" -> "561 bert/encoder/layer_1/intermediate/dense/mul_1" [label="[]", style=solid]; -"561 bert/encoder/layer_1/intermediate/dense/mul_1" -> "562 bert/encoder/layer_1/intermediate/dense/Tanh" [label="[]", style=solid]; -"562 bert/encoder/layer_1/intermediate/dense/Tanh" -> "563 bert/encoder/layer_1/intermediate/dense/add_1" [label="[]", style=solid]; -"563 bert/encoder/layer_1/intermediate/dense/add_1" -> "564 bert/encoder/layer_1/intermediate/dense/mul_2" [label="[]", style=solid]; -"564 bert/encoder/layer_1/intermediate/dense/mul_2" -> "565 bert/encoder/layer_1/intermediate/dense/mul_3" [label="[]", style=solid]; -"565 bert/encoder/layer_1/intermediate/dense/mul_3" -> "566 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; -"566 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3^0_1" -> "567 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; -"567 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3^0_1" -> "570 bert/encoder/layer_1/output/dense/MatMul" [label="[]", style=solid]; -"568 QuantizeLinear_bert/encoder/layer_1/output/dense/kernel^0_1" -> "569 DequantizeLinear_bert/encoder/layer_1/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; -"569 DequantizeLinear_bert/encoder/layer_1/output/dense/kernel^0_1" -> "570 bert/encoder/layer_1/output/dense/MatMul" [label="[3072, 768]", style=solid]; -"570 bert/encoder/layer_1/output/dense/MatMul" -> "571 bert/encoder/layer_1/output/dense/BiasAdd" [label="[]", style=solid]; -"571 bert/encoder/layer_1/output/dense/BiasAdd" -> "572 
bert/encoder/layer_1/output/add" [label="[]", style=solid]; -"572 bert/encoder/layer_1/output/add" -> "573 bert/encoder/layer_1/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"572 bert/encoder/layer_1/output/add" -> "575 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"572 bert/encoder/layer_1/output/add" -> "584 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"573 bert/encoder/layer_1/output/LayerNorm/moments/mean" -> "574 bert/encoder/layer_1/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"573 bert/encoder/layer_1/output/LayerNorm/moments/mean" -> "582 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"574 bert/encoder/layer_1/output/LayerNorm/moments/StopGradient" -> "575 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"575 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference" -> "576 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference__327" [label="[]", style=solid]; -"576 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference__327" -> "577 bert/encoder/layer_1/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"577 bert/encoder/layer_1/output/LayerNorm/moments/variance" -> "578 bert/encoder/layer_1/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"578 bert/encoder/layer_1/output/LayerNorm/batchnorm/add" -> "579 bert/encoder/layer_1/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"579 bert/encoder/layer_1/output/LayerNorm/batchnorm/Rsqrt" -> "580 bert/encoder/layer_1/output/LayerNorm/batchnorm/Rsqrt__329" [label="[]", style=solid]; -"580 bert/encoder/layer_1/output/LayerNorm/batchnorm/Rsqrt__329" -> "581 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"581 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul" -> "582 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"581 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul" -> "584 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"582 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_2" -> "583 bert/encoder/layer_1/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"583 bert/encoder/layer_1/output/LayerNorm/batchnorm/sub" -> "585 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"584 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_1" -> "585 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"585 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" -> "586 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"585 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" -> "590 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; -"585 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" -> "592 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; -"585 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" -> "628 bert/encoder/layer_2/attention/output/add" [label="[]", style=solid]; -"586 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_1" -> "587 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"587 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_1" -> "594 
bert/encoder/layer_2/attention/self/value/MatMul" [label="[]", style=solid]; -"588 QuantizeLinear_bert/encoder/layer_2/attention/self/value/kernel^0_1" -> "589 DequantizeLinear_bert/encoder/layer_2/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; -"589 DequantizeLinear_bert/encoder/layer_2/attention/self/value/kernel^0_1" -> "594 bert/encoder/layer_2/attention/self/value/MatMul" [label="[768, 768]", style=solid]; -"590 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_2" -> "591 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; -"591 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_2" -> "600 bert/encoder/layer_2/attention/self/query/MatMul" [label="[]", style=solid]; -"592 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_3" -> "593 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; -"593 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_3" -> "608 bert/encoder/layer_2/attention/self/key/MatMul" [label="[]", style=solid]; -"594 bert/encoder/layer_2/attention/self/value/MatMul" -> "595 bert/encoder/layer_2/attention/self/value/BiasAdd" [label="[]", style=solid]; -"595 bert/encoder/layer_2/attention/self/value/BiasAdd" -> "596 bert/encoder/layer_2/attention/self/Reshape_2" [label="[]", style=solid]; -"596 bert/encoder/layer_2/attention/self/Reshape_2" -> "597 bert/encoder/layer_2/attention/self/transpose_2" [label="[]", style=solid]; -"597 bert/encoder/layer_2/attention/self/transpose_2" -> "619 bert/encoder/layer_2/attention/self/MatMul_1" [label="[]", style=solid]; -"598 QuantizeLinear_bert/encoder/layer_2/attention/self/query/kernel^0_1" -> "599 DequantizeLinear_bert/encoder/layer_2/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; -"599 DequantizeLinear_bert/encoder/layer_2/attention/self/query/kernel^0_1" -> "600 bert/encoder/layer_2/attention/self/query/MatMul" [label="[768, 768]", style=solid]; -"600 bert/encoder/layer_2/attention/self/query/MatMul" -> "601 bert/encoder/layer_2/attention/self/query/BiasAdd" [label="[]", style=solid]; -"601 bert/encoder/layer_2/attention/self/query/BiasAdd" -> "602 QuantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; -"602 QuantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd^0_1" -> "603 DequantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; -"603 DequantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd^0_1" -> "604 bert/encoder/layer_2/attention/self/Reshape" [label="[]", style=solid]; -"604 bert/encoder/layer_2/attention/self/Reshape" -> "605 bert/encoder/layer_2/attention/self/transpose" [label="[]", style=solid]; -"605 bert/encoder/layer_2/attention/self/transpose" -> "615 bert/encoder/layer_2/attention/self/MatMul" [label="[]", style=solid]; -"606 QuantizeLinear_bert/encoder/layer_2/attention/self/key/kernel^0_1" -> "607 DequantizeLinear_bert/encoder/layer_2/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; -"607 DequantizeLinear_bert/encoder/layer_2/attention/self/key/kernel^0_1" -> "608 bert/encoder/layer_2/attention/self/key/MatMul" [label="[768, 768]", style=solid]; -"608 bert/encoder/layer_2/attention/self/key/MatMul" -> "609 bert/encoder/layer_2/attention/self/key/BiasAdd" [label="[]", style=solid]; -"609 bert/encoder/layer_2/attention/self/key/BiasAdd" -> "610 
QuantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; -"610 QuantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd^0_1" -> "611 DequantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; -"611 DequantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd^0_1" -> "612 bert/encoder/layer_2/attention/self/Reshape_1" [label="[]", style=solid]; -"612 bert/encoder/layer_2/attention/self/Reshape_1" -> "613 bert/encoder/layer_2/attention/self/transpose_1" [label="[]", style=solid]; -"613 bert/encoder/layer_2/attention/self/transpose_1" -> "614 bert/encoder/layer_2/attention/self/MatMul__334" [label="[]", style=solid]; -"614 bert/encoder/layer_2/attention/self/MatMul__334" -> "615 bert/encoder/layer_2/attention/self/MatMul" [label="[]", style=solid]; -"615 bert/encoder/layer_2/attention/self/MatMul" -> "616 bert/encoder/layer_2/attention/self/Mul" [label="[]", style=solid]; -"616 bert/encoder/layer_2/attention/self/Mul" -> "617 bert/encoder/layer_2/attention/self/add" [label="[]", style=solid]; -"617 bert/encoder/layer_2/attention/self/add" -> "618 bert/encoder/layer_2/attention/self/Softmax" [label="[]", style=solid]; -"618 bert/encoder/layer_2/attention/self/Softmax" -> "619 bert/encoder/layer_2/attention/self/MatMul_1" [label="[]", style=solid]; -"619 bert/encoder/layer_2/attention/self/MatMul_1" -> "620 QuantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1^0_1" [label="[]", style=solid]; -"620 QuantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1^0_1" -> "621 DequantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; -"621 DequantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1^0_1" -> "622 bert/encoder/layer_2/attention/self/transpose_3" [label="[]", style=solid]; -"622 bert/encoder/layer_2/attention/self/transpose_3" -> "623 bert/encoder/layer_2/attention/self/Reshape_3" [label="[]", style=solid]; -"623 bert/encoder/layer_2/attention/self/Reshape_3" -> "626 bert/encoder/layer_2/attention/output/dense/MatMul" [label="[]", style=solid]; -"624 QuantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel^0_1" -> "625 DequantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; -"625 DequantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel^0_1" -> "626 bert/encoder/layer_2/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; -"626 bert/encoder/layer_2/attention/output/dense/MatMul" -> "627 bert/encoder/layer_2/attention/output/dense/BiasAdd" [label="[]", style=solid]; -"627 bert/encoder/layer_2/attention/output/dense/BiasAdd" -> "628 bert/encoder/layer_2/attention/output/add" [label="[]", style=solid]; -"628 bert/encoder/layer_2/attention/output/add" -> "629 bert/encoder/layer_2/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"628 bert/encoder/layer_2/attention/output/add" -> "631 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"628 bert/encoder/layer_2/attention/output/add" -> "640 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"629 bert/encoder/layer_2/attention/output/LayerNorm/moments/mean" -> "630 bert/encoder/layer_2/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"629 bert/encoder/layer_2/attention/output/LayerNorm/moments/mean" -> "638 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", 
style=solid]; -"630 bert/encoder/layer_2/attention/output/LayerNorm/moments/StopGradient" -> "631 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"631 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference" -> "632 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference__337" [label="[]", style=solid]; -"632 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference__337" -> "633 bert/encoder/layer_2/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"633 bert/encoder/layer_2/attention/output/LayerNorm/moments/variance" -> "634 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"634 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add" -> "635 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"635 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/Rsqrt" -> "636 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/Rsqrt__339" [label="[]", style=solid]; -"636 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/Rsqrt__339" -> "637 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"637 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul" -> "638 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"637 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul" -> "640 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"638 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_2" -> "639 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"639 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/sub" -> "641 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"640 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_1" -> "641 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"641 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1" -> "642 QuantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"641 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1" -> "662 bert/encoder/layer_2/output/add" [label="[]", style=solid]; -"642 QuantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "643 DequantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"643 DequantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "646 bert/encoder/layer_2/intermediate/dense/MatMul" [label="[]", style=solid]; -"644 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel^0_1" -> "645 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; -"645 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel^0_1" -> "646 bert/encoder/layer_2/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; -"646 bert/encoder/layer_2/intermediate/dense/MatMul" -> "647 bert/encoder/layer_2/intermediate/dense/BiasAdd" [label="[]", style=solid]; -"647 bert/encoder/layer_2/intermediate/dense/BiasAdd" -> "648 bert/encoder/layer_2/intermediate/dense/Pow" [label="[]", style=solid]; -"647 bert/encoder/layer_2/intermediate/dense/BiasAdd" -> "650 
bert/encoder/layer_2/intermediate/dense/add" [label="[]", style=solid]; -"647 bert/encoder/layer_2/intermediate/dense/BiasAdd" -> "655 bert/encoder/layer_2/intermediate/dense/mul_3" [label="[]", style=solid]; -"648 bert/encoder/layer_2/intermediate/dense/Pow" -> "649 bert/encoder/layer_2/intermediate/dense/mul" [label="[]", style=solid]; -"649 bert/encoder/layer_2/intermediate/dense/mul" -> "650 bert/encoder/layer_2/intermediate/dense/add" [label="[]", style=solid]; -"650 bert/encoder/layer_2/intermediate/dense/add" -> "651 bert/encoder/layer_2/intermediate/dense/mul_1" [label="[]", style=solid]; -"651 bert/encoder/layer_2/intermediate/dense/mul_1" -> "652 bert/encoder/layer_2/intermediate/dense/Tanh" [label="[]", style=solid]; -"652 bert/encoder/layer_2/intermediate/dense/Tanh" -> "653 bert/encoder/layer_2/intermediate/dense/add_1" [label="[]", style=solid]; -"653 bert/encoder/layer_2/intermediate/dense/add_1" -> "654 bert/encoder/layer_2/intermediate/dense/mul_2" [label="[]", style=solid]; -"654 bert/encoder/layer_2/intermediate/dense/mul_2" -> "655 bert/encoder/layer_2/intermediate/dense/mul_3" [label="[]", style=solid]; -"655 bert/encoder/layer_2/intermediate/dense/mul_3" -> "656 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; -"656 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3^0_1" -> "657 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; -"657 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3^0_1" -> "660 bert/encoder/layer_2/output/dense/MatMul" [label="[]", style=solid]; -"658 QuantizeLinear_bert/encoder/layer_2/output/dense/kernel^0_1" -> "659 DequantizeLinear_bert/encoder/layer_2/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; -"659 DequantizeLinear_bert/encoder/layer_2/output/dense/kernel^0_1" -> "660 bert/encoder/layer_2/output/dense/MatMul" [label="[3072, 768]", style=solid]; -"660 bert/encoder/layer_2/output/dense/MatMul" -> "661 bert/encoder/layer_2/output/dense/BiasAdd" [label="[]", style=solid]; -"661 bert/encoder/layer_2/output/dense/BiasAdd" -> "662 bert/encoder/layer_2/output/add" [label="[]", style=solid]; -"662 bert/encoder/layer_2/output/add" -> "663 bert/encoder/layer_2/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"662 bert/encoder/layer_2/output/add" -> "665 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"662 bert/encoder/layer_2/output/add" -> "674 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"663 bert/encoder/layer_2/output/LayerNorm/moments/mean" -> "664 bert/encoder/layer_2/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"663 bert/encoder/layer_2/output/LayerNorm/moments/mean" -> "672 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"664 bert/encoder/layer_2/output/LayerNorm/moments/StopGradient" -> "665 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"665 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference" -> "666 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference__341" [label="[]", style=solid]; -"666 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference__341" -> "667 bert/encoder/layer_2/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"667 bert/encoder/layer_2/output/LayerNorm/moments/variance" -> "668 bert/encoder/layer_2/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; 
-"668 bert/encoder/layer_2/output/LayerNorm/batchnorm/add" -> "669 bert/encoder/layer_2/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"669 bert/encoder/layer_2/output/LayerNorm/batchnorm/Rsqrt" -> "670 bert/encoder/layer_2/output/LayerNorm/batchnorm/Rsqrt__343" [label="[]", style=solid]; -"670 bert/encoder/layer_2/output/LayerNorm/batchnorm/Rsqrt__343" -> "671 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"671 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul" -> "672 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"671 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul" -> "674 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"672 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_2" -> "673 bert/encoder/layer_2/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"673 bert/encoder/layer_2/output/LayerNorm/batchnorm/sub" -> "675 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"674 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_1" -> "675 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"675 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" -> "676 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"675 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" -> "680 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; -"675 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" -> "682 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; -"675 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" -> "718 bert/encoder/layer_3/attention/output/add" [label="[]", style=solid]; -"676 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_1" -> "677 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"677 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_1" -> "684 bert/encoder/layer_3/attention/self/value/MatMul" [label="[]", style=solid]; -"678 QuantizeLinear_bert/encoder/layer_3/attention/self/value/kernel^0_1" -> "679 DequantizeLinear_bert/encoder/layer_3/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; -"679 DequantizeLinear_bert/encoder/layer_3/attention/self/value/kernel^0_1" -> "684 bert/encoder/layer_3/attention/self/value/MatMul" [label="[768, 768]", style=solid]; -"680 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_2" -> "681 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; -"681 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_2" -> "690 bert/encoder/layer_3/attention/self/query/MatMul" [label="[]", style=solid]; -"682 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_3" -> "683 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; -"683 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_3" -> "698 bert/encoder/layer_3/attention/self/key/MatMul" [label="[]", style=solid]; -"684 bert/encoder/layer_3/attention/self/value/MatMul" -> "685 bert/encoder/layer_3/attention/self/value/BiasAdd" [label="[]", style=solid]; -"685 bert/encoder/layer_3/attention/self/value/BiasAdd" -> "686 bert/encoder/layer_3/attention/self/Reshape_2" 
[label="[]", style=solid]; -"686 bert/encoder/layer_3/attention/self/Reshape_2" -> "687 bert/encoder/layer_3/attention/self/transpose_2" [label="[]", style=solid]; -"687 bert/encoder/layer_3/attention/self/transpose_2" -> "709 bert/encoder/layer_3/attention/self/MatMul_1" [label="[]", style=solid]; -"688 QuantizeLinear_bert/encoder/layer_3/attention/self/query/kernel^0_1" -> "689 DequantizeLinear_bert/encoder/layer_3/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; -"689 DequantizeLinear_bert/encoder/layer_3/attention/self/query/kernel^0_1" -> "690 bert/encoder/layer_3/attention/self/query/MatMul" [label="[768, 768]", style=solid]; -"690 bert/encoder/layer_3/attention/self/query/MatMul" -> "691 bert/encoder/layer_3/attention/self/query/BiasAdd" [label="[]", style=solid]; -"691 bert/encoder/layer_3/attention/self/query/BiasAdd" -> "692 QuantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; -"692 QuantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd^0_1" -> "693 DequantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; -"693 DequantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd^0_1" -> "694 bert/encoder/layer_3/attention/self/Reshape" [label="[]", style=solid]; -"694 bert/encoder/layer_3/attention/self/Reshape" -> "695 bert/encoder/layer_3/attention/self/transpose" [label="[]", style=solid]; -"695 bert/encoder/layer_3/attention/self/transpose" -> "705 bert/encoder/layer_3/attention/self/MatMul" [label="[]", style=solid]; -"696 QuantizeLinear_bert/encoder/layer_3/attention/self/key/kernel^0_1" -> "697 DequantizeLinear_bert/encoder/layer_3/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; -"697 DequantizeLinear_bert/encoder/layer_3/attention/self/key/kernel^0_1" -> "698 bert/encoder/layer_3/attention/self/key/MatMul" [label="[768, 768]", style=solid]; -"698 bert/encoder/layer_3/attention/self/key/MatMul" -> "699 bert/encoder/layer_3/attention/self/key/BiasAdd" [label="[]", style=solid]; -"699 bert/encoder/layer_3/attention/self/key/BiasAdd" -> "700 QuantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; -"700 QuantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd^0_1" -> "701 DequantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; -"701 DequantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd^0_1" -> "702 bert/encoder/layer_3/attention/self/Reshape_1" [label="[]", style=solid]; -"702 bert/encoder/layer_3/attention/self/Reshape_1" -> "703 bert/encoder/layer_3/attention/self/transpose_1" [label="[]", style=solid]; -"703 bert/encoder/layer_3/attention/self/transpose_1" -> "704 bert/encoder/layer_3/attention/self/MatMul__348" [label="[]", style=solid]; -"704 bert/encoder/layer_3/attention/self/MatMul__348" -> "705 bert/encoder/layer_3/attention/self/MatMul" [label="[]", style=solid]; -"705 bert/encoder/layer_3/attention/self/MatMul" -> "706 bert/encoder/layer_3/attention/self/Mul" [label="[]", style=solid]; -"706 bert/encoder/layer_3/attention/self/Mul" -> "707 bert/encoder/layer_3/attention/self/add" [label="[]", style=solid]; -"707 bert/encoder/layer_3/attention/self/add" -> "708 bert/encoder/layer_3/attention/self/Softmax" [label="[]", style=solid]; -"708 bert/encoder/layer_3/attention/self/Softmax" -> "709 bert/encoder/layer_3/attention/self/MatMul_1" [label="[]", style=solid]; -"709 bert/encoder/layer_3/attention/self/MatMul_1" -> "710 
QuantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1^0_1" [label="[]", style=solid]; -"710 QuantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1^0_1" -> "711 DequantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; -"711 DequantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1^0_1" -> "712 bert/encoder/layer_3/attention/self/transpose_3" [label="[]", style=solid]; -"712 bert/encoder/layer_3/attention/self/transpose_3" -> "713 bert/encoder/layer_3/attention/self/Reshape_3" [label="[]", style=solid]; -"713 bert/encoder/layer_3/attention/self/Reshape_3" -> "716 bert/encoder/layer_3/attention/output/dense/MatMul" [label="[]", style=solid]; -"714 QuantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel^0_1" -> "715 DequantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; -"715 DequantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel^0_1" -> "716 bert/encoder/layer_3/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; -"716 bert/encoder/layer_3/attention/output/dense/MatMul" -> "717 bert/encoder/layer_3/attention/output/dense/BiasAdd" [label="[]", style=solid]; -"717 bert/encoder/layer_3/attention/output/dense/BiasAdd" -> "718 bert/encoder/layer_3/attention/output/add" [label="[]", style=solid]; -"718 bert/encoder/layer_3/attention/output/add" -> "719 bert/encoder/layer_3/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"718 bert/encoder/layer_3/attention/output/add" -> "721 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"718 bert/encoder/layer_3/attention/output/add" -> "730 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"719 bert/encoder/layer_3/attention/output/LayerNorm/moments/mean" -> "720 bert/encoder/layer_3/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"719 bert/encoder/layer_3/attention/output/LayerNorm/moments/mean" -> "728 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"720 bert/encoder/layer_3/attention/output/LayerNorm/moments/StopGradient" -> "721 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"721 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference" -> "722 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference__351" [label="[]", style=solid]; -"722 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference__351" -> "723 bert/encoder/layer_3/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"723 bert/encoder/layer_3/attention/output/LayerNorm/moments/variance" -> "724 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"724 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add" -> "725 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"725 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/Rsqrt" -> "726 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/Rsqrt__353" [label="[]", style=solid]; -"726 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/Rsqrt__353" -> "727 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"727 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul" -> "728 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_2" 
[label="[]", style=solid]; -"727 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul" -> "730 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"728 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_2" -> "729 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"729 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/sub" -> "731 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"730 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_1" -> "731 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"731 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1" -> "732 QuantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"731 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1" -> "752 bert/encoder/layer_3/output/add" [label="[]", style=solid]; -"732 QuantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "733 DequantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"733 DequantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "736 bert/encoder/layer_3/intermediate/dense/MatMul" [label="[]", style=solid]; -"734 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel^0_1" -> "735 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; -"735 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel^0_1" -> "736 bert/encoder/layer_3/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; -"736 bert/encoder/layer_3/intermediate/dense/MatMul" -> "737 bert/encoder/layer_3/intermediate/dense/BiasAdd" [label="[]", style=solid]; -"737 bert/encoder/layer_3/intermediate/dense/BiasAdd" -> "738 bert/encoder/layer_3/intermediate/dense/Pow" [label="[]", style=solid]; -"737 bert/encoder/layer_3/intermediate/dense/BiasAdd" -> "740 bert/encoder/layer_3/intermediate/dense/add" [label="[]", style=solid]; -"737 bert/encoder/layer_3/intermediate/dense/BiasAdd" -> "745 bert/encoder/layer_3/intermediate/dense/mul_3" [label="[]", style=solid]; -"738 bert/encoder/layer_3/intermediate/dense/Pow" -> "739 bert/encoder/layer_3/intermediate/dense/mul" [label="[]", style=solid]; -"739 bert/encoder/layer_3/intermediate/dense/mul" -> "740 bert/encoder/layer_3/intermediate/dense/add" [label="[]", style=solid]; -"740 bert/encoder/layer_3/intermediate/dense/add" -> "741 bert/encoder/layer_3/intermediate/dense/mul_1" [label="[]", style=solid]; -"741 bert/encoder/layer_3/intermediate/dense/mul_1" -> "742 bert/encoder/layer_3/intermediate/dense/Tanh" [label="[]", style=solid]; -"742 bert/encoder/layer_3/intermediate/dense/Tanh" -> "743 bert/encoder/layer_3/intermediate/dense/add_1" [label="[]", style=solid]; -"743 bert/encoder/layer_3/intermediate/dense/add_1" -> "744 bert/encoder/layer_3/intermediate/dense/mul_2" [label="[]", style=solid]; -"744 bert/encoder/layer_3/intermediate/dense/mul_2" -> "745 bert/encoder/layer_3/intermediate/dense/mul_3" [label="[]", style=solid]; -"745 bert/encoder/layer_3/intermediate/dense/mul_3" -> "746 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; -"746 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3^0_1" -> "747 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3^0_1" 
[label="[]", style=dashed]; -"747 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3^0_1" -> "750 bert/encoder/layer_3/output/dense/MatMul" [label="[]", style=solid]; -"748 QuantizeLinear_bert/encoder/layer_3/output/dense/kernel^0_1" -> "749 DequantizeLinear_bert/encoder/layer_3/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; -"749 DequantizeLinear_bert/encoder/layer_3/output/dense/kernel^0_1" -> "750 bert/encoder/layer_3/output/dense/MatMul" [label="[3072, 768]", style=solid]; -"750 bert/encoder/layer_3/output/dense/MatMul" -> "751 bert/encoder/layer_3/output/dense/BiasAdd" [label="[]", style=solid]; -"751 bert/encoder/layer_3/output/dense/BiasAdd" -> "752 bert/encoder/layer_3/output/add" [label="[]", style=solid]; -"752 bert/encoder/layer_3/output/add" -> "753 bert/encoder/layer_3/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"752 bert/encoder/layer_3/output/add" -> "755 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"752 bert/encoder/layer_3/output/add" -> "764 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"753 bert/encoder/layer_3/output/LayerNorm/moments/mean" -> "754 bert/encoder/layer_3/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"753 bert/encoder/layer_3/output/LayerNorm/moments/mean" -> "762 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"754 bert/encoder/layer_3/output/LayerNorm/moments/StopGradient" -> "755 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"755 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference" -> "756 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference__355" [label="[]", style=solid]; -"756 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference__355" -> "757 bert/encoder/layer_3/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"757 bert/encoder/layer_3/output/LayerNorm/moments/variance" -> "758 bert/encoder/layer_3/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"758 bert/encoder/layer_3/output/LayerNorm/batchnorm/add" -> "759 bert/encoder/layer_3/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"759 bert/encoder/layer_3/output/LayerNorm/batchnorm/Rsqrt" -> "760 bert/encoder/layer_3/output/LayerNorm/batchnorm/Rsqrt__357" [label="[]", style=solid]; -"760 bert/encoder/layer_3/output/LayerNorm/batchnorm/Rsqrt__357" -> "761 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"761 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul" -> "762 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"761 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul" -> "764 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"762 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_2" -> "763 bert/encoder/layer_3/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"763 bert/encoder/layer_3/output/LayerNorm/batchnorm/sub" -> "765 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"764 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_1" -> "765 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"765 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" -> "766 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"765 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" -> "770 
QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; -"765 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" -> "772 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; -"765 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" -> "808 bert/encoder/layer_4/attention/output/add" [label="[]", style=solid]; -"766 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_1" -> "767 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"767 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_1" -> "774 bert/encoder/layer_4/attention/self/value/MatMul" [label="[]", style=solid]; -"768 QuantizeLinear_bert/encoder/layer_4/attention/self/value/kernel^0_1" -> "769 DequantizeLinear_bert/encoder/layer_4/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; -"769 DequantizeLinear_bert/encoder/layer_4/attention/self/value/kernel^0_1" -> "774 bert/encoder/layer_4/attention/self/value/MatMul" [label="[768, 768]", style=solid]; -"770 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_2" -> "771 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; -"771 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_2" -> "780 bert/encoder/layer_4/attention/self/query/MatMul" [label="[]", style=solid]; -"772 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_3" -> "773 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; -"773 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_3" -> "788 bert/encoder/layer_4/attention/self/key/MatMul" [label="[]", style=solid]; -"774 bert/encoder/layer_4/attention/self/value/MatMul" -> "775 bert/encoder/layer_4/attention/self/value/BiasAdd" [label="[]", style=solid]; -"775 bert/encoder/layer_4/attention/self/value/BiasAdd" -> "776 bert/encoder/layer_4/attention/self/Reshape_2" [label="[]", style=solid]; -"776 bert/encoder/layer_4/attention/self/Reshape_2" -> "777 bert/encoder/layer_4/attention/self/transpose_2" [label="[]", style=solid]; -"777 bert/encoder/layer_4/attention/self/transpose_2" -> "799 bert/encoder/layer_4/attention/self/MatMul_1" [label="[]", style=solid]; -"778 QuantizeLinear_bert/encoder/layer_4/attention/self/query/kernel^0_1" -> "779 DequantizeLinear_bert/encoder/layer_4/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; -"779 DequantizeLinear_bert/encoder/layer_4/attention/self/query/kernel^0_1" -> "780 bert/encoder/layer_4/attention/self/query/MatMul" [label="[768, 768]", style=solid]; -"780 bert/encoder/layer_4/attention/self/query/MatMul" -> "781 bert/encoder/layer_4/attention/self/query/BiasAdd" [label="[]", style=solid]; -"781 bert/encoder/layer_4/attention/self/query/BiasAdd" -> "782 QuantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; -"782 QuantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd^0_1" -> "783 DequantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; -"783 DequantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd^0_1" -> "784 bert/encoder/layer_4/attention/self/Reshape" [label="[]", style=solid]; -"784 bert/encoder/layer_4/attention/self/Reshape" -> "785 bert/encoder/layer_4/attention/self/transpose" [label="[]", style=solid]; -"785 
bert/encoder/layer_4/attention/self/transpose" -> "795 bert/encoder/layer_4/attention/self/MatMul" [label="[]", style=solid]; -"786 QuantizeLinear_bert/encoder/layer_4/attention/self/key/kernel^0_1" -> "787 DequantizeLinear_bert/encoder/layer_4/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; -"787 DequantizeLinear_bert/encoder/layer_4/attention/self/key/kernel^0_1" -> "788 bert/encoder/layer_4/attention/self/key/MatMul" [label="[768, 768]", style=solid]; -"788 bert/encoder/layer_4/attention/self/key/MatMul" -> "789 bert/encoder/layer_4/attention/self/key/BiasAdd" [label="[]", style=solid]; -"789 bert/encoder/layer_4/attention/self/key/BiasAdd" -> "790 QuantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; -"790 QuantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd^0_1" -> "791 DequantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; -"791 DequantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd^0_1" -> "792 bert/encoder/layer_4/attention/self/Reshape_1" [label="[]", style=solid]; -"792 bert/encoder/layer_4/attention/self/Reshape_1" -> "793 bert/encoder/layer_4/attention/self/transpose_1" [label="[]", style=solid]; -"793 bert/encoder/layer_4/attention/self/transpose_1" -> "794 bert/encoder/layer_4/attention/self/MatMul__362" [label="[]", style=solid]; -"794 bert/encoder/layer_4/attention/self/MatMul__362" -> "795 bert/encoder/layer_4/attention/self/MatMul" [label="[]", style=solid]; -"795 bert/encoder/layer_4/attention/self/MatMul" -> "796 bert/encoder/layer_4/attention/self/Mul" [label="[]", style=solid]; -"796 bert/encoder/layer_4/attention/self/Mul" -> "797 bert/encoder/layer_4/attention/self/add" [label="[]", style=solid]; -"797 bert/encoder/layer_4/attention/self/add" -> "798 bert/encoder/layer_4/attention/self/Softmax" [label="[]", style=solid]; -"798 bert/encoder/layer_4/attention/self/Softmax" -> "799 bert/encoder/layer_4/attention/self/MatMul_1" [label="[]", style=solid]; -"799 bert/encoder/layer_4/attention/self/MatMul_1" -> "800 QuantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1^0_1" [label="[]", style=solid]; -"800 QuantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1^0_1" -> "801 DequantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; -"801 DequantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1^0_1" -> "802 bert/encoder/layer_4/attention/self/transpose_3" [label="[]", style=solid]; -"802 bert/encoder/layer_4/attention/self/transpose_3" -> "803 bert/encoder/layer_4/attention/self/Reshape_3" [label="[]", style=solid]; -"803 bert/encoder/layer_4/attention/self/Reshape_3" -> "806 bert/encoder/layer_4/attention/output/dense/MatMul" [label="[]", style=solid]; -"804 QuantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel^0_1" -> "805 DequantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; -"805 DequantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel^0_1" -> "806 bert/encoder/layer_4/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; -"806 bert/encoder/layer_4/attention/output/dense/MatMul" -> "807 bert/encoder/layer_4/attention/output/dense/BiasAdd" [label="[]", style=solid]; -"807 bert/encoder/layer_4/attention/output/dense/BiasAdd" -> "808 bert/encoder/layer_4/attention/output/add" [label="[]", style=solid]; -"808 bert/encoder/layer_4/attention/output/add" -> "809 
bert/encoder/layer_4/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"808 bert/encoder/layer_4/attention/output/add" -> "811 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"808 bert/encoder/layer_4/attention/output/add" -> "820 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"809 bert/encoder/layer_4/attention/output/LayerNorm/moments/mean" -> "810 bert/encoder/layer_4/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"809 bert/encoder/layer_4/attention/output/LayerNorm/moments/mean" -> "818 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"810 bert/encoder/layer_4/attention/output/LayerNorm/moments/StopGradient" -> "811 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"811 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference" -> "812 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference__365" [label="[]", style=solid]; -"812 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference__365" -> "813 bert/encoder/layer_4/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"813 bert/encoder/layer_4/attention/output/LayerNorm/moments/variance" -> "814 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"814 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add" -> "815 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"815 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/Rsqrt" -> "816 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/Rsqrt__367" [label="[]", style=solid]; -"816 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/Rsqrt__367" -> "817 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"817 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul" -> "818 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"817 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul" -> "820 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"818 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_2" -> "819 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"819 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/sub" -> "821 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"820 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_1" -> "821 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"821 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1" -> "822 QuantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"821 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1" -> "842 bert/encoder/layer_4/output/add" [label="[]", style=solid]; -"822 QuantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "823 DequantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"823 DequantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "826 bert/encoder/layer_4/intermediate/dense/MatMul" [label="[]", 
style=solid]; -"824 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel^0_1" -> "825 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; -"825 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel^0_1" -> "826 bert/encoder/layer_4/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; -"826 bert/encoder/layer_4/intermediate/dense/MatMul" -> "827 bert/encoder/layer_4/intermediate/dense/BiasAdd" [label="[]", style=solid]; -"827 bert/encoder/layer_4/intermediate/dense/BiasAdd" -> "828 bert/encoder/layer_4/intermediate/dense/Pow" [label="[]", style=solid]; -"827 bert/encoder/layer_4/intermediate/dense/BiasAdd" -> "830 bert/encoder/layer_4/intermediate/dense/add" [label="[]", style=solid]; -"827 bert/encoder/layer_4/intermediate/dense/BiasAdd" -> "835 bert/encoder/layer_4/intermediate/dense/mul_3" [label="[]", style=solid]; -"828 bert/encoder/layer_4/intermediate/dense/Pow" -> "829 bert/encoder/layer_4/intermediate/dense/mul" [label="[]", style=solid]; -"829 bert/encoder/layer_4/intermediate/dense/mul" -> "830 bert/encoder/layer_4/intermediate/dense/add" [label="[]", style=solid]; -"830 bert/encoder/layer_4/intermediate/dense/add" -> "831 bert/encoder/layer_4/intermediate/dense/mul_1" [label="[]", style=solid]; -"831 bert/encoder/layer_4/intermediate/dense/mul_1" -> "832 bert/encoder/layer_4/intermediate/dense/Tanh" [label="[]", style=solid]; -"832 bert/encoder/layer_4/intermediate/dense/Tanh" -> "833 bert/encoder/layer_4/intermediate/dense/add_1" [label="[]", style=solid]; -"833 bert/encoder/layer_4/intermediate/dense/add_1" -> "834 bert/encoder/layer_4/intermediate/dense/mul_2" [label="[]", style=solid]; -"834 bert/encoder/layer_4/intermediate/dense/mul_2" -> "835 bert/encoder/layer_4/intermediate/dense/mul_3" [label="[]", style=solid]; -"835 bert/encoder/layer_4/intermediate/dense/mul_3" -> "836 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; -"836 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3^0_1" -> "837 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; -"837 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3^0_1" -> "840 bert/encoder/layer_4/output/dense/MatMul" [label="[]", style=solid]; -"838 QuantizeLinear_bert/encoder/layer_4/output/dense/kernel^0_1" -> "839 DequantizeLinear_bert/encoder/layer_4/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; -"839 DequantizeLinear_bert/encoder/layer_4/output/dense/kernel^0_1" -> "840 bert/encoder/layer_4/output/dense/MatMul" [label="[3072, 768]", style=solid]; -"840 bert/encoder/layer_4/output/dense/MatMul" -> "841 bert/encoder/layer_4/output/dense/BiasAdd" [label="[]", style=solid]; -"841 bert/encoder/layer_4/output/dense/BiasAdd" -> "842 bert/encoder/layer_4/output/add" [label="[]", style=solid]; -"842 bert/encoder/layer_4/output/add" -> "843 bert/encoder/layer_4/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"842 bert/encoder/layer_4/output/add" -> "845 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"842 bert/encoder/layer_4/output/add" -> "854 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"843 bert/encoder/layer_4/output/LayerNorm/moments/mean" -> "844 bert/encoder/layer_4/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"843 bert/encoder/layer_4/output/LayerNorm/moments/mean" -> "852 
bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"844 bert/encoder/layer_4/output/LayerNorm/moments/StopGradient" -> "845 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"845 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference" -> "846 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference__369" [label="[]", style=solid]; -"846 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference__369" -> "847 bert/encoder/layer_4/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"847 bert/encoder/layer_4/output/LayerNorm/moments/variance" -> "848 bert/encoder/layer_4/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"848 bert/encoder/layer_4/output/LayerNorm/batchnorm/add" -> "849 bert/encoder/layer_4/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"849 bert/encoder/layer_4/output/LayerNorm/batchnorm/Rsqrt" -> "850 bert/encoder/layer_4/output/LayerNorm/batchnorm/Rsqrt__371" [label="[]", style=solid]; -"850 bert/encoder/layer_4/output/LayerNorm/batchnorm/Rsqrt__371" -> "851 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"851 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul" -> "852 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"851 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul" -> "854 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"852 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_2" -> "853 bert/encoder/layer_4/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"853 bert/encoder/layer_4/output/LayerNorm/batchnorm/sub" -> "855 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"854 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_1" -> "855 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"855 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" -> "856 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"855 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" -> "860 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; -"855 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" -> "862 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; -"855 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" -> "898 bert/encoder/layer_5/attention/output/add" [label="[]", style=solid]; -"856 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_1" -> "857 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"857 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_1" -> "864 bert/encoder/layer_5/attention/self/value/MatMul" [label="[]", style=solid]; -"858 QuantizeLinear_bert/encoder/layer_5/attention/self/value/kernel^0_1" -> "859 DequantizeLinear_bert/encoder/layer_5/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; -"859 DequantizeLinear_bert/encoder/layer_5/attention/self/value/kernel^0_1" -> "864 bert/encoder/layer_5/attention/self/value/MatMul" [label="[768, 768]", style=solid]; -"860 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_2" -> "861 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; -"861 
DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_2" -> "870 bert/encoder/layer_5/attention/self/query/MatMul" [label="[]", style=solid]; -"862 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_3" -> "863 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; -"863 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_3" -> "878 bert/encoder/layer_5/attention/self/key/MatMul" [label="[]", style=solid]; -"864 bert/encoder/layer_5/attention/self/value/MatMul" -> "865 bert/encoder/layer_5/attention/self/value/BiasAdd" [label="[]", style=solid]; -"865 bert/encoder/layer_5/attention/self/value/BiasAdd" -> "866 bert/encoder/layer_5/attention/self/Reshape_2" [label="[]", style=solid]; -"866 bert/encoder/layer_5/attention/self/Reshape_2" -> "867 bert/encoder/layer_5/attention/self/transpose_2" [label="[]", style=solid]; -"867 bert/encoder/layer_5/attention/self/transpose_2" -> "889 bert/encoder/layer_5/attention/self/MatMul_1" [label="[]", style=solid]; -"868 QuantizeLinear_bert/encoder/layer_5/attention/self/query/kernel^0_1" -> "869 DequantizeLinear_bert/encoder/layer_5/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; -"869 DequantizeLinear_bert/encoder/layer_5/attention/self/query/kernel^0_1" -> "870 bert/encoder/layer_5/attention/self/query/MatMul" [label="[768, 768]", style=solid]; -"870 bert/encoder/layer_5/attention/self/query/MatMul" -> "871 bert/encoder/layer_5/attention/self/query/BiasAdd" [label="[]", style=solid]; -"871 bert/encoder/layer_5/attention/self/query/BiasAdd" -> "872 QuantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; -"872 QuantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd^0_1" -> "873 DequantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; -"873 DequantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd^0_1" -> "874 bert/encoder/layer_5/attention/self/Reshape" [label="[]", style=solid]; -"874 bert/encoder/layer_5/attention/self/Reshape" -> "875 bert/encoder/layer_5/attention/self/transpose" [label="[]", style=solid]; -"875 bert/encoder/layer_5/attention/self/transpose" -> "885 bert/encoder/layer_5/attention/self/MatMul" [label="[]", style=solid]; -"876 QuantizeLinear_bert/encoder/layer_5/attention/self/key/kernel^0_1" -> "877 DequantizeLinear_bert/encoder/layer_5/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; -"877 DequantizeLinear_bert/encoder/layer_5/attention/self/key/kernel^0_1" -> "878 bert/encoder/layer_5/attention/self/key/MatMul" [label="[768, 768]", style=solid]; -"878 bert/encoder/layer_5/attention/self/key/MatMul" -> "879 bert/encoder/layer_5/attention/self/key/BiasAdd" [label="[]", style=solid]; -"879 bert/encoder/layer_5/attention/self/key/BiasAdd" -> "880 QuantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; -"880 QuantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd^0_1" -> "881 DequantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; -"881 DequantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd^0_1" -> "882 bert/encoder/layer_5/attention/self/Reshape_1" [label="[]", style=solid]; -"882 bert/encoder/layer_5/attention/self/Reshape_1" -> "883 bert/encoder/layer_5/attention/self/transpose_1" [label="[]", style=solid]; -"883 bert/encoder/layer_5/attention/self/transpose_1" -> "884 
bert/encoder/layer_5/attention/self/MatMul__376" [label="[]", style=solid]; -"884 bert/encoder/layer_5/attention/self/MatMul__376" -> "885 bert/encoder/layer_5/attention/self/MatMul" [label="[]", style=solid]; -"885 bert/encoder/layer_5/attention/self/MatMul" -> "886 bert/encoder/layer_5/attention/self/Mul" [label="[]", style=solid]; -"886 bert/encoder/layer_5/attention/self/Mul" -> "887 bert/encoder/layer_5/attention/self/add" [label="[]", style=solid]; -"887 bert/encoder/layer_5/attention/self/add" -> "888 bert/encoder/layer_5/attention/self/Softmax" [label="[]", style=solid]; -"888 bert/encoder/layer_5/attention/self/Softmax" -> "889 bert/encoder/layer_5/attention/self/MatMul_1" [label="[]", style=solid]; -"889 bert/encoder/layer_5/attention/self/MatMul_1" -> "890 QuantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1^0_1" [label="[]", style=solid]; -"890 QuantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1^0_1" -> "891 DequantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; -"891 DequantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1^0_1" -> "892 bert/encoder/layer_5/attention/self/transpose_3" [label="[]", style=solid]; -"892 bert/encoder/layer_5/attention/self/transpose_3" -> "893 bert/encoder/layer_5/attention/self/Reshape_3" [label="[]", style=solid]; -"893 bert/encoder/layer_5/attention/self/Reshape_3" -> "896 bert/encoder/layer_5/attention/output/dense/MatMul" [label="[]", style=solid]; -"894 QuantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel^0_1" -> "895 DequantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; -"895 DequantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel^0_1" -> "896 bert/encoder/layer_5/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; -"896 bert/encoder/layer_5/attention/output/dense/MatMul" -> "897 bert/encoder/layer_5/attention/output/dense/BiasAdd" [label="[]", style=solid]; -"897 bert/encoder/layer_5/attention/output/dense/BiasAdd" -> "898 bert/encoder/layer_5/attention/output/add" [label="[]", style=solid]; -"898 bert/encoder/layer_5/attention/output/add" -> "899 bert/encoder/layer_5/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"898 bert/encoder/layer_5/attention/output/add" -> "901 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"898 bert/encoder/layer_5/attention/output/add" -> "910 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"899 bert/encoder/layer_5/attention/output/LayerNorm/moments/mean" -> "900 bert/encoder/layer_5/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"899 bert/encoder/layer_5/attention/output/LayerNorm/moments/mean" -> "908 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"900 bert/encoder/layer_5/attention/output/LayerNorm/moments/StopGradient" -> "901 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"901 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference" -> "902 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference__379" [label="[]", style=solid]; -"902 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference__379" -> "903 bert/encoder/layer_5/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"903 
bert/encoder/layer_5/attention/output/LayerNorm/moments/variance" -> "904 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"904 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add" -> "905 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"905 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/Rsqrt" -> "906 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/Rsqrt__381" [label="[]", style=solid]; -"906 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/Rsqrt__381" -> "907 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"907 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul" -> "908 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"907 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul" -> "910 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"908 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_2" -> "909 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"909 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/sub" -> "911 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"910 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_1" -> "911 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"911 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1" -> "912 QuantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"911 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1" -> "932 bert/encoder/layer_5/output/add" [label="[]", style=solid]; -"912 QuantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "913 DequantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"913 DequantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "916 bert/encoder/layer_5/intermediate/dense/MatMul" [label="[]", style=solid]; -"914 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel^0_1" -> "915 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; -"915 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel^0_1" -> "916 bert/encoder/layer_5/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; -"916 bert/encoder/layer_5/intermediate/dense/MatMul" -> "917 bert/encoder/layer_5/intermediate/dense/BiasAdd" [label="[]", style=solid]; -"917 bert/encoder/layer_5/intermediate/dense/BiasAdd" -> "918 bert/encoder/layer_5/intermediate/dense/Pow" [label="[]", style=solid]; -"917 bert/encoder/layer_5/intermediate/dense/BiasAdd" -> "920 bert/encoder/layer_5/intermediate/dense/add" [label="[]", style=solid]; -"917 bert/encoder/layer_5/intermediate/dense/BiasAdd" -> "925 bert/encoder/layer_5/intermediate/dense/mul_3" [label="[]", style=solid]; -"918 bert/encoder/layer_5/intermediate/dense/Pow" -> "919 bert/encoder/layer_5/intermediate/dense/mul" [label="[]", style=solid]; -"919 bert/encoder/layer_5/intermediate/dense/mul" -> "920 bert/encoder/layer_5/intermediate/dense/add" [label="[]", style=solid]; -"920 bert/encoder/layer_5/intermediate/dense/add" -> "921 bert/encoder/layer_5/intermediate/dense/mul_1" [label="[]", style=solid]; -"921 
bert/encoder/layer_5/intermediate/dense/mul_1" -> "922 bert/encoder/layer_5/intermediate/dense/Tanh" [label="[]", style=solid]; -"922 bert/encoder/layer_5/intermediate/dense/Tanh" -> "923 bert/encoder/layer_5/intermediate/dense/add_1" [label="[]", style=solid]; -"923 bert/encoder/layer_5/intermediate/dense/add_1" -> "924 bert/encoder/layer_5/intermediate/dense/mul_2" [label="[]", style=solid]; -"924 bert/encoder/layer_5/intermediate/dense/mul_2" -> "925 bert/encoder/layer_5/intermediate/dense/mul_3" [label="[]", style=solid]; -"925 bert/encoder/layer_5/intermediate/dense/mul_3" -> "926 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; -"926 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3^0_1" -> "927 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; -"927 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3^0_1" -> "930 bert/encoder/layer_5/output/dense/MatMul" [label="[]", style=solid]; -"928 QuantizeLinear_bert/encoder/layer_5/output/dense/kernel^0_1" -> "929 DequantizeLinear_bert/encoder/layer_5/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; -"929 DequantizeLinear_bert/encoder/layer_5/output/dense/kernel^0_1" -> "930 bert/encoder/layer_5/output/dense/MatMul" [label="[3072, 768]", style=solid]; -"930 bert/encoder/layer_5/output/dense/MatMul" -> "931 bert/encoder/layer_5/output/dense/BiasAdd" [label="[]", style=solid]; -"931 bert/encoder/layer_5/output/dense/BiasAdd" -> "932 bert/encoder/layer_5/output/add" [label="[]", style=solid]; -"932 bert/encoder/layer_5/output/add" -> "933 bert/encoder/layer_5/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"932 bert/encoder/layer_5/output/add" -> "935 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"932 bert/encoder/layer_5/output/add" -> "944 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"933 bert/encoder/layer_5/output/LayerNorm/moments/mean" -> "934 bert/encoder/layer_5/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"933 bert/encoder/layer_5/output/LayerNorm/moments/mean" -> "942 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"934 bert/encoder/layer_5/output/LayerNorm/moments/StopGradient" -> "935 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"935 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference" -> "936 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference__383" [label="[]", style=solid]; -"936 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference__383" -> "937 bert/encoder/layer_5/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"937 bert/encoder/layer_5/output/LayerNorm/moments/variance" -> "938 bert/encoder/layer_5/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"938 bert/encoder/layer_5/output/LayerNorm/batchnorm/add" -> "939 bert/encoder/layer_5/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"939 bert/encoder/layer_5/output/LayerNorm/batchnorm/Rsqrt" -> "940 bert/encoder/layer_5/output/LayerNorm/batchnorm/Rsqrt__385" [label="[]", style=solid]; -"940 bert/encoder/layer_5/output/LayerNorm/batchnorm/Rsqrt__385" -> "941 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"941 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul" -> "942 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"941 
bert/encoder/layer_5/output/LayerNorm/batchnorm/mul" -> "944 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"942 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_2" -> "943 bert/encoder/layer_5/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"943 bert/encoder/layer_5/output/LayerNorm/batchnorm/sub" -> "945 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"944 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_1" -> "945 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"945 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" -> "946 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"945 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" -> "950 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; -"945 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" -> "952 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; -"945 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" -> "988 bert/encoder/layer_6/attention/output/add" [label="[]", style=solid]; -"946 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_1" -> "947 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"947 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_1" -> "954 bert/encoder/layer_6/attention/self/value/MatMul" [label="[]", style=solid]; -"948 QuantizeLinear_bert/encoder/layer_6/attention/self/value/kernel^0_1" -> "949 DequantizeLinear_bert/encoder/layer_6/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; -"949 DequantizeLinear_bert/encoder/layer_6/attention/self/value/kernel^0_1" -> "954 bert/encoder/layer_6/attention/self/value/MatMul" [label="[768, 768]", style=solid]; -"950 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_2" -> "951 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; -"951 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_2" -> "960 bert/encoder/layer_6/attention/self/query/MatMul" [label="[]", style=solid]; -"952 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_3" -> "953 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; -"953 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_3" -> "968 bert/encoder/layer_6/attention/self/key/MatMul" [label="[]", style=solid]; -"954 bert/encoder/layer_6/attention/self/value/MatMul" -> "955 bert/encoder/layer_6/attention/self/value/BiasAdd" [label="[]", style=solid]; -"955 bert/encoder/layer_6/attention/self/value/BiasAdd" -> "956 bert/encoder/layer_6/attention/self/Reshape_2" [label="[]", style=solid]; -"956 bert/encoder/layer_6/attention/self/Reshape_2" -> "957 bert/encoder/layer_6/attention/self/transpose_2" [label="[]", style=solid]; -"957 bert/encoder/layer_6/attention/self/transpose_2" -> "979 bert/encoder/layer_6/attention/self/MatMul_1" [label="[]", style=solid]; -"958 QuantizeLinear_bert/encoder/layer_6/attention/self/query/kernel^0_1" -> "959 DequantizeLinear_bert/encoder/layer_6/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; -"959 DequantizeLinear_bert/encoder/layer_6/attention/self/query/kernel^0_1" -> "960 bert/encoder/layer_6/attention/self/query/MatMul" 
[label="[768, 768]", style=solid]; -"960 bert/encoder/layer_6/attention/self/query/MatMul" -> "961 bert/encoder/layer_6/attention/self/query/BiasAdd" [label="[]", style=solid]; -"961 bert/encoder/layer_6/attention/self/query/BiasAdd" -> "962 QuantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; -"962 QuantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd^0_1" -> "963 DequantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; -"963 DequantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd^0_1" -> "964 bert/encoder/layer_6/attention/self/Reshape" [label="[]", style=solid]; -"964 bert/encoder/layer_6/attention/self/Reshape" -> "965 bert/encoder/layer_6/attention/self/transpose" [label="[]", style=solid]; -"965 bert/encoder/layer_6/attention/self/transpose" -> "975 bert/encoder/layer_6/attention/self/MatMul" [label="[]", style=solid]; -"966 QuantizeLinear_bert/encoder/layer_6/attention/self/key/kernel^0_1" -> "967 DequantizeLinear_bert/encoder/layer_6/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; -"967 DequantizeLinear_bert/encoder/layer_6/attention/self/key/kernel^0_1" -> "968 bert/encoder/layer_6/attention/self/key/MatMul" [label="[768, 768]", style=solid]; -"968 bert/encoder/layer_6/attention/self/key/MatMul" -> "969 bert/encoder/layer_6/attention/self/key/BiasAdd" [label="[]", style=solid]; -"969 bert/encoder/layer_6/attention/self/key/BiasAdd" -> "970 QuantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; -"970 QuantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd^0_1" -> "971 DequantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; -"971 DequantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd^0_1" -> "972 bert/encoder/layer_6/attention/self/Reshape_1" [label="[]", style=solid]; -"972 bert/encoder/layer_6/attention/self/Reshape_1" -> "973 bert/encoder/layer_6/attention/self/transpose_1" [label="[]", style=solid]; -"973 bert/encoder/layer_6/attention/self/transpose_1" -> "974 bert/encoder/layer_6/attention/self/MatMul__390" [label="[]", style=solid]; -"974 bert/encoder/layer_6/attention/self/MatMul__390" -> "975 bert/encoder/layer_6/attention/self/MatMul" [label="[]", style=solid]; -"975 bert/encoder/layer_6/attention/self/MatMul" -> "976 bert/encoder/layer_6/attention/self/Mul" [label="[]", style=solid]; -"976 bert/encoder/layer_6/attention/self/Mul" -> "977 bert/encoder/layer_6/attention/self/add" [label="[]", style=solid]; -"977 bert/encoder/layer_6/attention/self/add" -> "978 bert/encoder/layer_6/attention/self/Softmax" [label="[]", style=solid]; -"978 bert/encoder/layer_6/attention/self/Softmax" -> "979 bert/encoder/layer_6/attention/self/MatMul_1" [label="[]", style=solid]; -"979 bert/encoder/layer_6/attention/self/MatMul_1" -> "980 QuantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1^0_1" [label="[]", style=solid]; -"980 QuantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1^0_1" -> "981 DequantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; -"981 DequantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1^0_1" -> "982 bert/encoder/layer_6/attention/self/transpose_3" [label="[]", style=solid]; -"982 bert/encoder/layer_6/attention/self/transpose_3" -> "983 bert/encoder/layer_6/attention/self/Reshape_3" [label="[]", style=solid]; -"983 bert/encoder/layer_6/attention/self/Reshape_3" -> "986 
bert/encoder/layer_6/attention/output/dense/MatMul" [label="[]", style=solid]; -"984 QuantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel^0_1" -> "985 DequantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; -"985 DequantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel^0_1" -> "986 bert/encoder/layer_6/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; -"986 bert/encoder/layer_6/attention/output/dense/MatMul" -> "987 bert/encoder/layer_6/attention/output/dense/BiasAdd" [label="[]", style=solid]; -"987 bert/encoder/layer_6/attention/output/dense/BiasAdd" -> "988 bert/encoder/layer_6/attention/output/add" [label="[]", style=solid]; -"988 bert/encoder/layer_6/attention/output/add" -> "989 bert/encoder/layer_6/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"988 bert/encoder/layer_6/attention/output/add" -> "991 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"988 bert/encoder/layer_6/attention/output/add" -> "1000 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"989 bert/encoder/layer_6/attention/output/LayerNorm/moments/mean" -> "990 bert/encoder/layer_6/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"989 bert/encoder/layer_6/attention/output/LayerNorm/moments/mean" -> "998 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"990 bert/encoder/layer_6/attention/output/LayerNorm/moments/StopGradient" -> "991 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"991 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference" -> "992 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference__393" [label="[]", style=solid]; -"992 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference__393" -> "993 bert/encoder/layer_6/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"993 bert/encoder/layer_6/attention/output/LayerNorm/moments/variance" -> "994 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"994 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add" -> "995 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"995 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/Rsqrt" -> "996 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/Rsqrt__395" [label="[]", style=solid]; -"996 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/Rsqrt__395" -> "997 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"997 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul" -> "998 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"997 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul" -> "1000 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"998 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_2" -> "999 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"999 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/sub" -> "1001 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1000 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_1" -> "1001 
bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1001 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1" -> "1002 QuantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"1001 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1" -> "1022 bert/encoder/layer_6/output/add" [label="[]", style=solid]; -"1002 QuantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1003 DequantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"1003 DequantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1006 bert/encoder/layer_6/intermediate/dense/MatMul" [label="[]", style=solid]; -"1004 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel^0_1" -> "1005 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; -"1005 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel^0_1" -> "1006 bert/encoder/layer_6/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; -"1006 bert/encoder/layer_6/intermediate/dense/MatMul" -> "1007 bert/encoder/layer_6/intermediate/dense/BiasAdd" [label="[]", style=solid]; -"1007 bert/encoder/layer_6/intermediate/dense/BiasAdd" -> "1008 bert/encoder/layer_6/intermediate/dense/Pow" [label="[]", style=solid]; -"1007 bert/encoder/layer_6/intermediate/dense/BiasAdd" -> "1010 bert/encoder/layer_6/intermediate/dense/add" [label="[]", style=solid]; -"1007 bert/encoder/layer_6/intermediate/dense/BiasAdd" -> "1015 bert/encoder/layer_6/intermediate/dense/mul_3" [label="[]", style=solid]; -"1008 bert/encoder/layer_6/intermediate/dense/Pow" -> "1009 bert/encoder/layer_6/intermediate/dense/mul" [label="[]", style=solid]; -"1009 bert/encoder/layer_6/intermediate/dense/mul" -> "1010 bert/encoder/layer_6/intermediate/dense/add" [label="[]", style=solid]; -"1010 bert/encoder/layer_6/intermediate/dense/add" -> "1011 bert/encoder/layer_6/intermediate/dense/mul_1" [label="[]", style=solid]; -"1011 bert/encoder/layer_6/intermediate/dense/mul_1" -> "1012 bert/encoder/layer_6/intermediate/dense/Tanh" [label="[]", style=solid]; -"1012 bert/encoder/layer_6/intermediate/dense/Tanh" -> "1013 bert/encoder/layer_6/intermediate/dense/add_1" [label="[]", style=solid]; -"1013 bert/encoder/layer_6/intermediate/dense/add_1" -> "1014 bert/encoder/layer_6/intermediate/dense/mul_2" [label="[]", style=solid]; -"1014 bert/encoder/layer_6/intermediate/dense/mul_2" -> "1015 bert/encoder/layer_6/intermediate/dense/mul_3" [label="[]", style=solid]; -"1015 bert/encoder/layer_6/intermediate/dense/mul_3" -> "1016 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; -"1016 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3^0_1" -> "1017 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; -"1017 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3^0_1" -> "1020 bert/encoder/layer_6/output/dense/MatMul" [label="[]", style=solid]; -"1018 QuantizeLinear_bert/encoder/layer_6/output/dense/kernel^0_1" -> "1019 DequantizeLinear_bert/encoder/layer_6/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; -"1019 DequantizeLinear_bert/encoder/layer_6/output/dense/kernel^0_1" -> "1020 bert/encoder/layer_6/output/dense/MatMul" [label="[3072, 768]", style=solid]; -"1020 bert/encoder/layer_6/output/dense/MatMul" -> "1021 
bert/encoder/layer_6/output/dense/BiasAdd" [label="[]", style=solid]; -"1021 bert/encoder/layer_6/output/dense/BiasAdd" -> "1022 bert/encoder/layer_6/output/add" [label="[]", style=solid]; -"1022 bert/encoder/layer_6/output/add" -> "1023 bert/encoder/layer_6/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"1022 bert/encoder/layer_6/output/add" -> "1025 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1022 bert/encoder/layer_6/output/add" -> "1034 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1023 bert/encoder/layer_6/output/LayerNorm/moments/mean" -> "1024 bert/encoder/layer_6/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"1023 bert/encoder/layer_6/output/LayerNorm/moments/mean" -> "1032 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1024 bert/encoder/layer_6/output/LayerNorm/moments/StopGradient" -> "1025 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1025 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference" -> "1026 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference__397" [label="[]", style=solid]; -"1026 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference__397" -> "1027 bert/encoder/layer_6/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"1027 bert/encoder/layer_6/output/LayerNorm/moments/variance" -> "1028 bert/encoder/layer_6/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"1028 bert/encoder/layer_6/output/LayerNorm/batchnorm/add" -> "1029 bert/encoder/layer_6/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"1029 bert/encoder/layer_6/output/LayerNorm/batchnorm/Rsqrt" -> "1030 bert/encoder/layer_6/output/LayerNorm/batchnorm/Rsqrt__399" [label="[]", style=solid]; -"1030 bert/encoder/layer_6/output/LayerNorm/batchnorm/Rsqrt__399" -> "1031 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"1031 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul" -> "1032 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1031 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul" -> "1034 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1032 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_2" -> "1033 bert/encoder/layer_6/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"1033 bert/encoder/layer_6/output/LayerNorm/batchnorm/sub" -> "1035 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1034 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_1" -> "1035 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1035 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" -> "1036 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"1035 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" -> "1040 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; -"1035 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" -> "1042 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; -"1035 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" -> "1078 bert/encoder/layer_7/attention/output/add" [label="[]", style=solid]; -"1036 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_1" -> "1037 
DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"1037 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_1" -> "1044 bert/encoder/layer_7/attention/self/value/MatMul" [label="[]", style=solid]; -"1038 QuantizeLinear_bert/encoder/layer_7/attention/self/value/kernel^0_1" -> "1039 DequantizeLinear_bert/encoder/layer_7/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; -"1039 DequantizeLinear_bert/encoder/layer_7/attention/self/value/kernel^0_1" -> "1044 bert/encoder/layer_7/attention/self/value/MatMul" [label="[768, 768]", style=solid]; -"1040 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_2" -> "1041 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; -"1041 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_2" -> "1050 bert/encoder/layer_7/attention/self/query/MatMul" [label="[]", style=solid]; -"1042 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_3" -> "1043 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; -"1043 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_3" -> "1058 bert/encoder/layer_7/attention/self/key/MatMul" [label="[]", style=solid]; -"1044 bert/encoder/layer_7/attention/self/value/MatMul" -> "1045 bert/encoder/layer_7/attention/self/value/BiasAdd" [label="[]", style=solid]; -"1045 bert/encoder/layer_7/attention/self/value/BiasAdd" -> "1046 bert/encoder/layer_7/attention/self/Reshape_2" [label="[]", style=solid]; -"1046 bert/encoder/layer_7/attention/self/Reshape_2" -> "1047 bert/encoder/layer_7/attention/self/transpose_2" [label="[]", style=solid]; -"1047 bert/encoder/layer_7/attention/self/transpose_2" -> "1069 bert/encoder/layer_7/attention/self/MatMul_1" [label="[]", style=solid]; -"1048 QuantizeLinear_bert/encoder/layer_7/attention/self/query/kernel^0_1" -> "1049 DequantizeLinear_bert/encoder/layer_7/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; -"1049 DequantizeLinear_bert/encoder/layer_7/attention/self/query/kernel^0_1" -> "1050 bert/encoder/layer_7/attention/self/query/MatMul" [label="[768, 768]", style=solid]; -"1050 bert/encoder/layer_7/attention/self/query/MatMul" -> "1051 bert/encoder/layer_7/attention/self/query/BiasAdd" [label="[]", style=solid]; -"1051 bert/encoder/layer_7/attention/self/query/BiasAdd" -> "1052 QuantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; -"1052 QuantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd^0_1" -> "1053 DequantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; -"1053 DequantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd^0_1" -> "1054 bert/encoder/layer_7/attention/self/Reshape" [label="[]", style=solid]; -"1054 bert/encoder/layer_7/attention/self/Reshape" -> "1055 bert/encoder/layer_7/attention/self/transpose" [label="[]", style=solid]; -"1055 bert/encoder/layer_7/attention/self/transpose" -> "1065 bert/encoder/layer_7/attention/self/MatMul" [label="[]", style=solid]; -"1056 QuantizeLinear_bert/encoder/layer_7/attention/self/key/kernel^0_1" -> "1057 DequantizeLinear_bert/encoder/layer_7/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; -"1057 DequantizeLinear_bert/encoder/layer_7/attention/self/key/kernel^0_1" -> "1058 bert/encoder/layer_7/attention/self/key/MatMul" [label="[768, 768]", 
style=solid]; -"1058 bert/encoder/layer_7/attention/self/key/MatMul" -> "1059 bert/encoder/layer_7/attention/self/key/BiasAdd" [label="[]", style=solid]; -"1059 bert/encoder/layer_7/attention/self/key/BiasAdd" -> "1060 QuantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; -"1060 QuantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd^0_1" -> "1061 DequantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; -"1061 DequantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd^0_1" -> "1062 bert/encoder/layer_7/attention/self/Reshape_1" [label="[]", style=solid]; -"1062 bert/encoder/layer_7/attention/self/Reshape_1" -> "1063 bert/encoder/layer_7/attention/self/transpose_1" [label="[]", style=solid]; -"1063 bert/encoder/layer_7/attention/self/transpose_1" -> "1064 bert/encoder/layer_7/attention/self/MatMul__404" [label="[]", style=solid]; -"1064 bert/encoder/layer_7/attention/self/MatMul__404" -> "1065 bert/encoder/layer_7/attention/self/MatMul" [label="[]", style=solid]; -"1065 bert/encoder/layer_7/attention/self/MatMul" -> "1066 bert/encoder/layer_7/attention/self/Mul" [label="[]", style=solid]; -"1066 bert/encoder/layer_7/attention/self/Mul" -> "1067 bert/encoder/layer_7/attention/self/add" [label="[]", style=solid]; -"1067 bert/encoder/layer_7/attention/self/add" -> "1068 bert/encoder/layer_7/attention/self/Softmax" [label="[]", style=solid]; -"1068 bert/encoder/layer_7/attention/self/Softmax" -> "1069 bert/encoder/layer_7/attention/self/MatMul_1" [label="[]", style=solid]; -"1069 bert/encoder/layer_7/attention/self/MatMul_1" -> "1070 QuantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1^0_1" [label="[]", style=solid]; -"1070 QuantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1^0_1" -> "1071 DequantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; -"1071 DequantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1^0_1" -> "1072 bert/encoder/layer_7/attention/self/transpose_3" [label="[]", style=solid]; -"1072 bert/encoder/layer_7/attention/self/transpose_3" -> "1073 bert/encoder/layer_7/attention/self/Reshape_3" [label="[]", style=solid]; -"1073 bert/encoder/layer_7/attention/self/Reshape_3" -> "1076 bert/encoder/layer_7/attention/output/dense/MatMul" [label="[]", style=solid]; -"1074 QuantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel^0_1" -> "1075 DequantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; -"1075 DequantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel^0_1" -> "1076 bert/encoder/layer_7/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; -"1076 bert/encoder/layer_7/attention/output/dense/MatMul" -> "1077 bert/encoder/layer_7/attention/output/dense/BiasAdd" [label="[]", style=solid]; -"1077 bert/encoder/layer_7/attention/output/dense/BiasAdd" -> "1078 bert/encoder/layer_7/attention/output/add" [label="[]", style=solid]; -"1078 bert/encoder/layer_7/attention/output/add" -> "1079 bert/encoder/layer_7/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"1078 bert/encoder/layer_7/attention/output/add" -> "1081 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1078 bert/encoder/layer_7/attention/output/add" -> "1090 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1079 bert/encoder/layer_7/attention/output/LayerNorm/moments/mean" -> "1080 
bert/encoder/layer_7/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"1079 bert/encoder/layer_7/attention/output/LayerNorm/moments/mean" -> "1088 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1080 bert/encoder/layer_7/attention/output/LayerNorm/moments/StopGradient" -> "1081 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1081 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference" -> "1082 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference__407" [label="[]", style=solid]; -"1082 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference__407" -> "1083 bert/encoder/layer_7/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"1083 bert/encoder/layer_7/attention/output/LayerNorm/moments/variance" -> "1084 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"1084 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add" -> "1085 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"1085 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/Rsqrt" -> "1086 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/Rsqrt__409" [label="[]", style=solid]; -"1086 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/Rsqrt__409" -> "1087 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"1087 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul" -> "1088 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1087 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul" -> "1090 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1088 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_2" -> "1089 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"1089 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/sub" -> "1091 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1090 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_1" -> "1091 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1091 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1" -> "1092 QuantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"1091 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1" -> "1112 bert/encoder/layer_7/output/add" [label="[]", style=solid]; -"1092 QuantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1093 DequantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"1093 DequantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1096 bert/encoder/layer_7/intermediate/dense/MatMul" [label="[]", style=solid]; -"1094 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel^0_1" -> "1095 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; -"1095 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel^0_1" -> "1096 bert/encoder/layer_7/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; -"1096 bert/encoder/layer_7/intermediate/dense/MatMul" -> "1097 
bert/encoder/layer_7/intermediate/dense/BiasAdd" [label="[]", style=solid]; -"1097 bert/encoder/layer_7/intermediate/dense/BiasAdd" -> "1098 bert/encoder/layer_7/intermediate/dense/Pow" [label="[]", style=solid]; -"1097 bert/encoder/layer_7/intermediate/dense/BiasAdd" -> "1100 bert/encoder/layer_7/intermediate/dense/add" [label="[]", style=solid]; -"1097 bert/encoder/layer_7/intermediate/dense/BiasAdd" -> "1105 bert/encoder/layer_7/intermediate/dense/mul_3" [label="[]", style=solid]; -"1098 bert/encoder/layer_7/intermediate/dense/Pow" -> "1099 bert/encoder/layer_7/intermediate/dense/mul" [label="[]", style=solid]; -"1099 bert/encoder/layer_7/intermediate/dense/mul" -> "1100 bert/encoder/layer_7/intermediate/dense/add" [label="[]", style=solid]; -"1100 bert/encoder/layer_7/intermediate/dense/add" -> "1101 bert/encoder/layer_7/intermediate/dense/mul_1" [label="[]", style=solid]; -"1101 bert/encoder/layer_7/intermediate/dense/mul_1" -> "1102 bert/encoder/layer_7/intermediate/dense/Tanh" [label="[]", style=solid]; -"1102 bert/encoder/layer_7/intermediate/dense/Tanh" -> "1103 bert/encoder/layer_7/intermediate/dense/add_1" [label="[]", style=solid]; -"1103 bert/encoder/layer_7/intermediate/dense/add_1" -> "1104 bert/encoder/layer_7/intermediate/dense/mul_2" [label="[]", style=solid]; -"1104 bert/encoder/layer_7/intermediate/dense/mul_2" -> "1105 bert/encoder/layer_7/intermediate/dense/mul_3" [label="[]", style=solid]; -"1105 bert/encoder/layer_7/intermediate/dense/mul_3" -> "1106 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; -"1106 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3^0_1" -> "1107 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; -"1107 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3^0_1" -> "1110 bert/encoder/layer_7/output/dense/MatMul" [label="[]", style=solid]; -"1108 QuantizeLinear_bert/encoder/layer_7/output/dense/kernel^0_1" -> "1109 DequantizeLinear_bert/encoder/layer_7/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; -"1109 DequantizeLinear_bert/encoder/layer_7/output/dense/kernel^0_1" -> "1110 bert/encoder/layer_7/output/dense/MatMul" [label="[3072, 768]", style=solid]; -"1110 bert/encoder/layer_7/output/dense/MatMul" -> "1111 bert/encoder/layer_7/output/dense/BiasAdd" [label="[]", style=solid]; -"1111 bert/encoder/layer_7/output/dense/BiasAdd" -> "1112 bert/encoder/layer_7/output/add" [label="[]", style=solid]; -"1112 bert/encoder/layer_7/output/add" -> "1113 bert/encoder/layer_7/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"1112 bert/encoder/layer_7/output/add" -> "1115 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1112 bert/encoder/layer_7/output/add" -> "1124 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1113 bert/encoder/layer_7/output/LayerNorm/moments/mean" -> "1114 bert/encoder/layer_7/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"1113 bert/encoder/layer_7/output/LayerNorm/moments/mean" -> "1122 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1114 bert/encoder/layer_7/output/LayerNorm/moments/StopGradient" -> "1115 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1115 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference" -> "1116 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference__411" [label="[]", style=solid]; 
-"1116 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference__411" -> "1117 bert/encoder/layer_7/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"1117 bert/encoder/layer_7/output/LayerNorm/moments/variance" -> "1118 bert/encoder/layer_7/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"1118 bert/encoder/layer_7/output/LayerNorm/batchnorm/add" -> "1119 bert/encoder/layer_7/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"1119 bert/encoder/layer_7/output/LayerNorm/batchnorm/Rsqrt" -> "1120 bert/encoder/layer_7/output/LayerNorm/batchnorm/Rsqrt__413" [label="[]", style=solid]; -"1120 bert/encoder/layer_7/output/LayerNorm/batchnorm/Rsqrt__413" -> "1121 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"1121 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul" -> "1122 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1121 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul" -> "1124 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1122 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_2" -> "1123 bert/encoder/layer_7/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"1123 bert/encoder/layer_7/output/LayerNorm/batchnorm/sub" -> "1125 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1124 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_1" -> "1125 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1125 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" -> "1126 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"1125 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" -> "1130 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; -"1125 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" -> "1132 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; -"1125 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" -> "1168 bert/encoder/layer_8/attention/output/add" [label="[]", style=solid]; -"1126 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_1" -> "1127 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"1127 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_1" -> "1134 bert/encoder/layer_8/attention/self/value/MatMul" [label="[]", style=solid]; -"1128 QuantizeLinear_bert/encoder/layer_8/attention/self/value/kernel^0_1" -> "1129 DequantizeLinear_bert/encoder/layer_8/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; -"1129 DequantizeLinear_bert/encoder/layer_8/attention/self/value/kernel^0_1" -> "1134 bert/encoder/layer_8/attention/self/value/MatMul" [label="[768, 768]", style=solid]; -"1130 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_2" -> "1131 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; -"1131 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_2" -> "1140 bert/encoder/layer_8/attention/self/query/MatMul" [label="[]", style=solid]; -"1132 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_3" -> "1133 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; -"1133 
DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_3" -> "1148 bert/encoder/layer_8/attention/self/key/MatMul" [label="[]", style=solid]; -"1134 bert/encoder/layer_8/attention/self/value/MatMul" -> "1135 bert/encoder/layer_8/attention/self/value/BiasAdd" [label="[]", style=solid]; -"1135 bert/encoder/layer_8/attention/self/value/BiasAdd" -> "1136 bert/encoder/layer_8/attention/self/Reshape_2" [label="[]", style=solid]; -"1136 bert/encoder/layer_8/attention/self/Reshape_2" -> "1137 bert/encoder/layer_8/attention/self/transpose_2" [label="[]", style=solid]; -"1137 bert/encoder/layer_8/attention/self/transpose_2" -> "1159 bert/encoder/layer_8/attention/self/MatMul_1" [label="[]", style=solid]; -"1138 QuantizeLinear_bert/encoder/layer_8/attention/self/query/kernel^0_1" -> "1139 DequantizeLinear_bert/encoder/layer_8/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; -"1139 DequantizeLinear_bert/encoder/layer_8/attention/self/query/kernel^0_1" -> "1140 bert/encoder/layer_8/attention/self/query/MatMul" [label="[768, 768]", style=solid]; -"1140 bert/encoder/layer_8/attention/self/query/MatMul" -> "1141 bert/encoder/layer_8/attention/self/query/BiasAdd" [label="[]", style=solid]; -"1141 bert/encoder/layer_8/attention/self/query/BiasAdd" -> "1142 QuantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; -"1142 QuantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd^0_1" -> "1143 DequantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; -"1143 DequantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd^0_1" -> "1144 bert/encoder/layer_8/attention/self/Reshape" [label="[]", style=solid]; -"1144 bert/encoder/layer_8/attention/self/Reshape" -> "1145 bert/encoder/layer_8/attention/self/transpose" [label="[]", style=solid]; -"1145 bert/encoder/layer_8/attention/self/transpose" -> "1155 bert/encoder/layer_8/attention/self/MatMul" [label="[]", style=solid]; -"1146 QuantizeLinear_bert/encoder/layer_8/attention/self/key/kernel^0_1" -> "1147 DequantizeLinear_bert/encoder/layer_8/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; -"1147 DequantizeLinear_bert/encoder/layer_8/attention/self/key/kernel^0_1" -> "1148 bert/encoder/layer_8/attention/self/key/MatMul" [label="[768, 768]", style=solid]; -"1148 bert/encoder/layer_8/attention/self/key/MatMul" -> "1149 bert/encoder/layer_8/attention/self/key/BiasAdd" [label="[]", style=solid]; -"1149 bert/encoder/layer_8/attention/self/key/BiasAdd" -> "1150 QuantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; -"1150 QuantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd^0_1" -> "1151 DequantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; -"1151 DequantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd^0_1" -> "1152 bert/encoder/layer_8/attention/self/Reshape_1" [label="[]", style=solid]; -"1152 bert/encoder/layer_8/attention/self/Reshape_1" -> "1153 bert/encoder/layer_8/attention/self/transpose_1" [label="[]", style=solid]; -"1153 bert/encoder/layer_8/attention/self/transpose_1" -> "1154 bert/encoder/layer_8/attention/self/MatMul__418" [label="[]", style=solid]; -"1154 bert/encoder/layer_8/attention/self/MatMul__418" -> "1155 bert/encoder/layer_8/attention/self/MatMul" [label="[]", style=solid]; -"1155 bert/encoder/layer_8/attention/self/MatMul" -> "1156 bert/encoder/layer_8/attention/self/Mul" [label="[]", style=solid]; 
-"1156 bert/encoder/layer_8/attention/self/Mul" -> "1157 bert/encoder/layer_8/attention/self/add" [label="[]", style=solid]; -"1157 bert/encoder/layer_8/attention/self/add" -> "1158 bert/encoder/layer_8/attention/self/Softmax" [label="[]", style=solid]; -"1158 bert/encoder/layer_8/attention/self/Softmax" -> "1159 bert/encoder/layer_8/attention/self/MatMul_1" [label="[]", style=solid]; -"1159 bert/encoder/layer_8/attention/self/MatMul_1" -> "1160 QuantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1^0_1" [label="[]", style=solid]; -"1160 QuantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1^0_1" -> "1161 DequantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; -"1161 DequantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1^0_1" -> "1162 bert/encoder/layer_8/attention/self/transpose_3" [label="[]", style=solid]; -"1162 bert/encoder/layer_8/attention/self/transpose_3" -> "1163 bert/encoder/layer_8/attention/self/Reshape_3" [label="[]", style=solid]; -"1163 bert/encoder/layer_8/attention/self/Reshape_3" -> "1166 bert/encoder/layer_8/attention/output/dense/MatMul" [label="[]", style=solid]; -"1164 QuantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel^0_1" -> "1165 DequantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; -"1165 DequantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel^0_1" -> "1166 bert/encoder/layer_8/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; -"1166 bert/encoder/layer_8/attention/output/dense/MatMul" -> "1167 bert/encoder/layer_8/attention/output/dense/BiasAdd" [label="[]", style=solid]; -"1167 bert/encoder/layer_8/attention/output/dense/BiasAdd" -> "1168 bert/encoder/layer_8/attention/output/add" [label="[]", style=solid]; -"1168 bert/encoder/layer_8/attention/output/add" -> "1169 bert/encoder/layer_8/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"1168 bert/encoder/layer_8/attention/output/add" -> "1171 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1168 bert/encoder/layer_8/attention/output/add" -> "1180 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1169 bert/encoder/layer_8/attention/output/LayerNorm/moments/mean" -> "1170 bert/encoder/layer_8/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"1169 bert/encoder/layer_8/attention/output/LayerNorm/moments/mean" -> "1178 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1170 bert/encoder/layer_8/attention/output/LayerNorm/moments/StopGradient" -> "1171 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1171 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference" -> "1172 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference__421" [label="[]", style=solid]; -"1172 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference__421" -> "1173 bert/encoder/layer_8/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"1173 bert/encoder/layer_8/attention/output/LayerNorm/moments/variance" -> "1174 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"1174 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add" -> "1175 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"1175 
bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/Rsqrt" -> "1176 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/Rsqrt__423" [label="[]", style=solid]; -"1176 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/Rsqrt__423" -> "1177 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"1177 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul" -> "1178 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1177 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul" -> "1180 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1178 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_2" -> "1179 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"1179 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/sub" -> "1181 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1180 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_1" -> "1181 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1181 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1" -> "1182 QuantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"1181 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1" -> "1202 bert/encoder/layer_8/output/add" [label="[]", style=solid]; -"1182 QuantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1183 DequantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"1183 DequantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1186 bert/encoder/layer_8/intermediate/dense/MatMul" [label="[]", style=solid]; -"1184 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel^0_1" -> "1185 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; -"1185 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel^0_1" -> "1186 bert/encoder/layer_8/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; -"1186 bert/encoder/layer_8/intermediate/dense/MatMul" -> "1187 bert/encoder/layer_8/intermediate/dense/BiasAdd" [label="[]", style=solid]; -"1187 bert/encoder/layer_8/intermediate/dense/BiasAdd" -> "1188 bert/encoder/layer_8/intermediate/dense/Pow" [label="[]", style=solid]; -"1187 bert/encoder/layer_8/intermediate/dense/BiasAdd" -> "1190 bert/encoder/layer_8/intermediate/dense/add" [label="[]", style=solid]; -"1187 bert/encoder/layer_8/intermediate/dense/BiasAdd" -> "1195 bert/encoder/layer_8/intermediate/dense/mul_3" [label="[]", style=solid]; -"1188 bert/encoder/layer_8/intermediate/dense/Pow" -> "1189 bert/encoder/layer_8/intermediate/dense/mul" [label="[]", style=solid]; -"1189 bert/encoder/layer_8/intermediate/dense/mul" -> "1190 bert/encoder/layer_8/intermediate/dense/add" [label="[]", style=solid]; -"1190 bert/encoder/layer_8/intermediate/dense/add" -> "1191 bert/encoder/layer_8/intermediate/dense/mul_1" [label="[]", style=solid]; -"1191 bert/encoder/layer_8/intermediate/dense/mul_1" -> "1192 bert/encoder/layer_8/intermediate/dense/Tanh" [label="[]", style=solid]; -"1192 bert/encoder/layer_8/intermediate/dense/Tanh" -> "1193 bert/encoder/layer_8/intermediate/dense/add_1" [label="[]", style=solid]; -"1193 
bert/encoder/layer_8/intermediate/dense/add_1" -> "1194 bert/encoder/layer_8/intermediate/dense/mul_2" [label="[]", style=solid]; -"1194 bert/encoder/layer_8/intermediate/dense/mul_2" -> "1195 bert/encoder/layer_8/intermediate/dense/mul_3" [label="[]", style=solid]; -"1195 bert/encoder/layer_8/intermediate/dense/mul_3" -> "1196 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; -"1196 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3^0_1" -> "1197 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; -"1197 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3^0_1" -> "1200 bert/encoder/layer_8/output/dense/MatMul" [label="[]", style=solid]; -"1198 QuantizeLinear_bert/encoder/layer_8/output/dense/kernel^0_1" -> "1199 DequantizeLinear_bert/encoder/layer_8/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; -"1199 DequantizeLinear_bert/encoder/layer_8/output/dense/kernel^0_1" -> "1200 bert/encoder/layer_8/output/dense/MatMul" [label="[3072, 768]", style=solid]; -"1200 bert/encoder/layer_8/output/dense/MatMul" -> "1201 bert/encoder/layer_8/output/dense/BiasAdd" [label="[]", style=solid]; -"1201 bert/encoder/layer_8/output/dense/BiasAdd" -> "1202 bert/encoder/layer_8/output/add" [label="[]", style=solid]; -"1202 bert/encoder/layer_8/output/add" -> "1203 bert/encoder/layer_8/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"1202 bert/encoder/layer_8/output/add" -> "1205 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1202 bert/encoder/layer_8/output/add" -> "1214 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1203 bert/encoder/layer_8/output/LayerNorm/moments/mean" -> "1204 bert/encoder/layer_8/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"1203 bert/encoder/layer_8/output/LayerNorm/moments/mean" -> "1212 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1204 bert/encoder/layer_8/output/LayerNorm/moments/StopGradient" -> "1205 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1205 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference" -> "1206 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference__425" [label="[]", style=solid]; -"1206 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference__425" -> "1207 bert/encoder/layer_8/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"1207 bert/encoder/layer_8/output/LayerNorm/moments/variance" -> "1208 bert/encoder/layer_8/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"1208 bert/encoder/layer_8/output/LayerNorm/batchnorm/add" -> "1209 bert/encoder/layer_8/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"1209 bert/encoder/layer_8/output/LayerNorm/batchnorm/Rsqrt" -> "1210 bert/encoder/layer_8/output/LayerNorm/batchnorm/Rsqrt__427" [label="[]", style=solid]; -"1210 bert/encoder/layer_8/output/LayerNorm/batchnorm/Rsqrt__427" -> "1211 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"1211 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul" -> "1212 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1211 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul" -> "1214 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1212 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_2" -> "1213 
bert/encoder/layer_8/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"1213 bert/encoder/layer_8/output/LayerNorm/batchnorm/sub" -> "1215 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1214 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_1" -> "1215 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1215 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" -> "1216 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"1215 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" -> "1220 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; -"1215 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" -> "1222 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; -"1215 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" -> "1258 bert/encoder/layer_9/attention/output/add" [label="[]", style=solid]; -"1216 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_1" -> "1217 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"1217 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_1" -> "1224 bert/encoder/layer_9/attention/self/value/MatMul" [label="[]", style=solid]; -"1218 QuantizeLinear_bert/encoder/layer_9/attention/self/value/kernel^0_1" -> "1219 DequantizeLinear_bert/encoder/layer_9/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; -"1219 DequantizeLinear_bert/encoder/layer_9/attention/self/value/kernel^0_1" -> "1224 bert/encoder/layer_9/attention/self/value/MatMul" [label="[768, 768]", style=solid]; -"1220 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_2" -> "1221 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; -"1221 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_2" -> "1230 bert/encoder/layer_9/attention/self/query/MatMul" [label="[]", style=solid]; -"1222 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_3" -> "1223 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; -"1223 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_3" -> "1238 bert/encoder/layer_9/attention/self/key/MatMul" [label="[]", style=solid]; -"1224 bert/encoder/layer_9/attention/self/value/MatMul" -> "1225 bert/encoder/layer_9/attention/self/value/BiasAdd" [label="[]", style=solid]; -"1225 bert/encoder/layer_9/attention/self/value/BiasAdd" -> "1226 bert/encoder/layer_9/attention/self/Reshape_2" [label="[]", style=solid]; -"1226 bert/encoder/layer_9/attention/self/Reshape_2" -> "1227 bert/encoder/layer_9/attention/self/transpose_2" [label="[]", style=solid]; -"1227 bert/encoder/layer_9/attention/self/transpose_2" -> "1249 bert/encoder/layer_9/attention/self/MatMul_1" [label="[]", style=solid]; -"1228 QuantizeLinear_bert/encoder/layer_9/attention/self/query/kernel^0_1" -> "1229 DequantizeLinear_bert/encoder/layer_9/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; -"1229 DequantizeLinear_bert/encoder/layer_9/attention/self/query/kernel^0_1" -> "1230 bert/encoder/layer_9/attention/self/query/MatMul" [label="[768, 768]", style=solid]; -"1230 bert/encoder/layer_9/attention/self/query/MatMul" -> "1231 bert/encoder/layer_9/attention/self/query/BiasAdd" [label="[]", 
style=solid]; -"1231 bert/encoder/layer_9/attention/self/query/BiasAdd" -> "1232 QuantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; -"1232 QuantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd^0_1" -> "1233 DequantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; -"1233 DequantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd^0_1" -> "1234 bert/encoder/layer_9/attention/self/Reshape" [label="[]", style=solid]; -"1234 bert/encoder/layer_9/attention/self/Reshape" -> "1235 bert/encoder/layer_9/attention/self/transpose" [label="[]", style=solid]; -"1235 bert/encoder/layer_9/attention/self/transpose" -> "1245 bert/encoder/layer_9/attention/self/MatMul" [label="[]", style=solid]; -"1236 QuantizeLinear_bert/encoder/layer_9/attention/self/key/kernel^0_1" -> "1237 DequantizeLinear_bert/encoder/layer_9/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; -"1237 DequantizeLinear_bert/encoder/layer_9/attention/self/key/kernel^0_1" -> "1238 bert/encoder/layer_9/attention/self/key/MatMul" [label="[768, 768]", style=solid]; -"1238 bert/encoder/layer_9/attention/self/key/MatMul" -> "1239 bert/encoder/layer_9/attention/self/key/BiasAdd" [label="[]", style=solid]; -"1239 bert/encoder/layer_9/attention/self/key/BiasAdd" -> "1240 QuantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; -"1240 QuantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd^0_1" -> "1241 DequantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; -"1241 DequantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd^0_1" -> "1242 bert/encoder/layer_9/attention/self/Reshape_1" [label="[]", style=solid]; -"1242 bert/encoder/layer_9/attention/self/Reshape_1" -> "1243 bert/encoder/layer_9/attention/self/transpose_1" [label="[]", style=solid]; -"1243 bert/encoder/layer_9/attention/self/transpose_1" -> "1244 bert/encoder/layer_9/attention/self/MatMul__432" [label="[]", style=solid]; -"1244 bert/encoder/layer_9/attention/self/MatMul__432" -> "1245 bert/encoder/layer_9/attention/self/MatMul" [label="[]", style=solid]; -"1245 bert/encoder/layer_9/attention/self/MatMul" -> "1246 bert/encoder/layer_9/attention/self/Mul" [label="[]", style=solid]; -"1246 bert/encoder/layer_9/attention/self/Mul" -> "1247 bert/encoder/layer_9/attention/self/add" [label="[]", style=solid]; -"1247 bert/encoder/layer_9/attention/self/add" -> "1248 bert/encoder/layer_9/attention/self/Softmax" [label="[]", style=solid]; -"1248 bert/encoder/layer_9/attention/self/Softmax" -> "1249 bert/encoder/layer_9/attention/self/MatMul_1" [label="[]", style=solid]; -"1249 bert/encoder/layer_9/attention/self/MatMul_1" -> "1250 QuantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1^0_1" [label="[]", style=solid]; -"1250 QuantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1^0_1" -> "1251 DequantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; -"1251 DequantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1^0_1" -> "1252 bert/encoder/layer_9/attention/self/transpose_3" [label="[]", style=solid]; -"1252 bert/encoder/layer_9/attention/self/transpose_3" -> "1253 bert/encoder/layer_9/attention/self/Reshape_3" [label="[]", style=solid]; -"1253 bert/encoder/layer_9/attention/self/Reshape_3" -> "1256 bert/encoder/layer_9/attention/output/dense/MatMul" [label="[]", style=solid]; -"1254 
QuantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel^0_1" -> "1255 DequantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; -"1255 DequantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel^0_1" -> "1256 bert/encoder/layer_9/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; -"1256 bert/encoder/layer_9/attention/output/dense/MatMul" -> "1257 bert/encoder/layer_9/attention/output/dense/BiasAdd" [label="[]", style=solid]; -"1257 bert/encoder/layer_9/attention/output/dense/BiasAdd" -> "1258 bert/encoder/layer_9/attention/output/add" [label="[]", style=solid]; -"1258 bert/encoder/layer_9/attention/output/add" -> "1259 bert/encoder/layer_9/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"1258 bert/encoder/layer_9/attention/output/add" -> "1261 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1258 bert/encoder/layer_9/attention/output/add" -> "1270 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1259 bert/encoder/layer_9/attention/output/LayerNorm/moments/mean" -> "1260 bert/encoder/layer_9/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"1259 bert/encoder/layer_9/attention/output/LayerNorm/moments/mean" -> "1268 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1260 bert/encoder/layer_9/attention/output/LayerNorm/moments/StopGradient" -> "1261 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1261 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference" -> "1262 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference__435" [label="[]", style=solid]; -"1262 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference__435" -> "1263 bert/encoder/layer_9/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"1263 bert/encoder/layer_9/attention/output/LayerNorm/moments/variance" -> "1264 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"1264 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add" -> "1265 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"1265 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/Rsqrt" -> "1266 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/Rsqrt__437" [label="[]", style=solid]; -"1266 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/Rsqrt__437" -> "1267 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"1267 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul" -> "1268 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1267 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul" -> "1270 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1268 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_2" -> "1269 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"1269 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/sub" -> "1271 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1270 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_1" -> "1271 
bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1271 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1" -> "1272 QuantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"1271 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1" -> "1292 bert/encoder/layer_9/output/add" [label="[]", style=solid]; -"1272 QuantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1273 DequantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"1273 DequantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1276 bert/encoder/layer_9/intermediate/dense/MatMul" [label="[]", style=solid]; -"1274 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel^0_1" -> "1275 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; -"1275 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel^0_1" -> "1276 bert/encoder/layer_9/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; -"1276 bert/encoder/layer_9/intermediate/dense/MatMul" -> "1277 bert/encoder/layer_9/intermediate/dense/BiasAdd" [label="[]", style=solid]; -"1277 bert/encoder/layer_9/intermediate/dense/BiasAdd" -> "1278 bert/encoder/layer_9/intermediate/dense/Pow" [label="[]", style=solid]; -"1277 bert/encoder/layer_9/intermediate/dense/BiasAdd" -> "1280 bert/encoder/layer_9/intermediate/dense/add" [label="[]", style=solid]; -"1277 bert/encoder/layer_9/intermediate/dense/BiasAdd" -> "1285 bert/encoder/layer_9/intermediate/dense/mul_3" [label="[]", style=solid]; -"1278 bert/encoder/layer_9/intermediate/dense/Pow" -> "1279 bert/encoder/layer_9/intermediate/dense/mul" [label="[]", style=solid]; -"1279 bert/encoder/layer_9/intermediate/dense/mul" -> "1280 bert/encoder/layer_9/intermediate/dense/add" [label="[]", style=solid]; -"1280 bert/encoder/layer_9/intermediate/dense/add" -> "1281 bert/encoder/layer_9/intermediate/dense/mul_1" [label="[]", style=solid]; -"1281 bert/encoder/layer_9/intermediate/dense/mul_1" -> "1282 bert/encoder/layer_9/intermediate/dense/Tanh" [label="[]", style=solid]; -"1282 bert/encoder/layer_9/intermediate/dense/Tanh" -> "1283 bert/encoder/layer_9/intermediate/dense/add_1" [label="[]", style=solid]; -"1283 bert/encoder/layer_9/intermediate/dense/add_1" -> "1284 bert/encoder/layer_9/intermediate/dense/mul_2" [label="[]", style=solid]; -"1284 bert/encoder/layer_9/intermediate/dense/mul_2" -> "1285 bert/encoder/layer_9/intermediate/dense/mul_3" [label="[]", style=solid]; -"1285 bert/encoder/layer_9/intermediate/dense/mul_3" -> "1286 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; -"1286 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3^0_1" -> "1287 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; -"1287 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3^0_1" -> "1290 bert/encoder/layer_9/output/dense/MatMul" [label="[]", style=solid]; -"1288 QuantizeLinear_bert/encoder/layer_9/output/dense/kernel^0_1" -> "1289 DequantizeLinear_bert/encoder/layer_9/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; -"1289 DequantizeLinear_bert/encoder/layer_9/output/dense/kernel^0_1" -> "1290 bert/encoder/layer_9/output/dense/MatMul" [label="[3072, 768]", style=solid]; -"1290 bert/encoder/layer_9/output/dense/MatMul" -> "1291 
bert/encoder/layer_9/output/dense/BiasAdd" [label="[]", style=solid]; -"1291 bert/encoder/layer_9/output/dense/BiasAdd" -> "1292 bert/encoder/layer_9/output/add" [label="[]", style=solid]; -"1292 bert/encoder/layer_9/output/add" -> "1293 bert/encoder/layer_9/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"1292 bert/encoder/layer_9/output/add" -> "1295 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1292 bert/encoder/layer_9/output/add" -> "1304 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1293 bert/encoder/layer_9/output/LayerNorm/moments/mean" -> "1294 bert/encoder/layer_9/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"1293 bert/encoder/layer_9/output/LayerNorm/moments/mean" -> "1302 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1294 bert/encoder/layer_9/output/LayerNorm/moments/StopGradient" -> "1295 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1295 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference" -> "1296 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference__439" [label="[]", style=solid]; -"1296 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference__439" -> "1297 bert/encoder/layer_9/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"1297 bert/encoder/layer_9/output/LayerNorm/moments/variance" -> "1298 bert/encoder/layer_9/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"1298 bert/encoder/layer_9/output/LayerNorm/batchnorm/add" -> "1299 bert/encoder/layer_9/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"1299 bert/encoder/layer_9/output/LayerNorm/batchnorm/Rsqrt" -> "1300 bert/encoder/layer_9/output/LayerNorm/batchnorm/Rsqrt__441" [label="[]", style=solid]; -"1300 bert/encoder/layer_9/output/LayerNorm/batchnorm/Rsqrt__441" -> "1301 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"1301 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul" -> "1302 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1301 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul" -> "1304 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1302 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_2" -> "1303 bert/encoder/layer_9/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"1303 bert/encoder/layer_9/output/LayerNorm/batchnorm/sub" -> "1305 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1304 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_1" -> "1305 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1305 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" -> "1306 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"1305 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" -> "1310 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; -"1305 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" -> "1312 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; -"1305 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" -> "1348 bert/encoder/layer_10/attention/output/add" [label="[]", style=solid]; -"1306 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_1" -> "1307 
DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"1307 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_1" -> "1314 bert/encoder/layer_10/attention/self/value/MatMul" [label="[]", style=solid]; -"1308 QuantizeLinear_bert/encoder/layer_10/attention/self/value/kernel^0_1" -> "1309 DequantizeLinear_bert/encoder/layer_10/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; -"1309 DequantizeLinear_bert/encoder/layer_10/attention/self/value/kernel^0_1" -> "1314 bert/encoder/layer_10/attention/self/value/MatMul" [label="[768, 768]", style=solid]; -"1310 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_2" -> "1311 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; -"1311 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_2" -> "1320 bert/encoder/layer_10/attention/self/query/MatMul" [label="[]", style=solid]; -"1312 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_3" -> "1313 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; -"1313 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_3" -> "1328 bert/encoder/layer_10/attention/self/key/MatMul" [label="[]", style=solid]; -"1314 bert/encoder/layer_10/attention/self/value/MatMul" -> "1315 bert/encoder/layer_10/attention/self/value/BiasAdd" [label="[]", style=solid]; -"1315 bert/encoder/layer_10/attention/self/value/BiasAdd" -> "1316 bert/encoder/layer_10/attention/self/Reshape_2" [label="[]", style=solid]; -"1316 bert/encoder/layer_10/attention/self/Reshape_2" -> "1317 bert/encoder/layer_10/attention/self/transpose_2" [label="[]", style=solid]; -"1317 bert/encoder/layer_10/attention/self/transpose_2" -> "1339 bert/encoder/layer_10/attention/self/MatMul_1" [label="[]", style=solid]; -"1318 QuantizeLinear_bert/encoder/layer_10/attention/self/query/kernel^0_1" -> "1319 DequantizeLinear_bert/encoder/layer_10/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; -"1319 DequantizeLinear_bert/encoder/layer_10/attention/self/query/kernel^0_1" -> "1320 bert/encoder/layer_10/attention/self/query/MatMul" [label="[768, 768]", style=solid]; -"1320 bert/encoder/layer_10/attention/self/query/MatMul" -> "1321 bert/encoder/layer_10/attention/self/query/BiasAdd" [label="[]", style=solid]; -"1321 bert/encoder/layer_10/attention/self/query/BiasAdd" -> "1322 QuantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; -"1322 QuantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd^0_1" -> "1323 DequantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; -"1323 DequantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd^0_1" -> "1324 bert/encoder/layer_10/attention/self/Reshape" [label="[]", style=solid]; -"1324 bert/encoder/layer_10/attention/self/Reshape" -> "1325 bert/encoder/layer_10/attention/self/transpose" [label="[]", style=solid]; -"1325 bert/encoder/layer_10/attention/self/transpose" -> "1335 bert/encoder/layer_10/attention/self/MatMul" [label="[]", style=solid]; -"1326 QuantizeLinear_bert/encoder/layer_10/attention/self/key/kernel^0_1" -> "1327 DequantizeLinear_bert/encoder/layer_10/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; -"1327 DequantizeLinear_bert/encoder/layer_10/attention/self/key/kernel^0_1" -> "1328 
bert/encoder/layer_10/attention/self/key/MatMul" [label="[768, 768]", style=solid]; -"1328 bert/encoder/layer_10/attention/self/key/MatMul" -> "1329 bert/encoder/layer_10/attention/self/key/BiasAdd" [label="[]", style=solid]; -"1329 bert/encoder/layer_10/attention/self/key/BiasAdd" -> "1330 QuantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; -"1330 QuantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd^0_1" -> "1331 DequantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; -"1331 DequantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd^0_1" -> "1332 bert/encoder/layer_10/attention/self/Reshape_1" [label="[]", style=solid]; -"1332 bert/encoder/layer_10/attention/self/Reshape_1" -> "1333 bert/encoder/layer_10/attention/self/transpose_1" [label="[]", style=solid]; -"1333 bert/encoder/layer_10/attention/self/transpose_1" -> "1334 bert/encoder/layer_10/attention/self/MatMul__446" [label="[]", style=solid]; -"1334 bert/encoder/layer_10/attention/self/MatMul__446" -> "1335 bert/encoder/layer_10/attention/self/MatMul" [label="[]", style=solid]; -"1335 bert/encoder/layer_10/attention/self/MatMul" -> "1336 bert/encoder/layer_10/attention/self/Mul" [label="[]", style=solid]; -"1336 bert/encoder/layer_10/attention/self/Mul" -> "1337 bert/encoder/layer_10/attention/self/add" [label="[]", style=solid]; -"1337 bert/encoder/layer_10/attention/self/add" -> "1338 bert/encoder/layer_10/attention/self/Softmax" [label="[]", style=solid]; -"1338 bert/encoder/layer_10/attention/self/Softmax" -> "1339 bert/encoder/layer_10/attention/self/MatMul_1" [label="[]", style=solid]; -"1339 bert/encoder/layer_10/attention/self/MatMul_1" -> "1340 QuantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1^0_1" [label="[]", style=solid]; -"1340 QuantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1^0_1" -> "1341 DequantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; -"1341 DequantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1^0_1" -> "1342 bert/encoder/layer_10/attention/self/transpose_3" [label="[]", style=solid]; -"1342 bert/encoder/layer_10/attention/self/transpose_3" -> "1343 bert/encoder/layer_10/attention/self/Reshape_3" [label="[]", style=solid]; -"1343 bert/encoder/layer_10/attention/self/Reshape_3" -> "1346 bert/encoder/layer_10/attention/output/dense/MatMul" [label="[]", style=solid]; -"1344 QuantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel^0_1" -> "1345 DequantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; -"1345 DequantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel^0_1" -> "1346 bert/encoder/layer_10/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; -"1346 bert/encoder/layer_10/attention/output/dense/MatMul" -> "1347 bert/encoder/layer_10/attention/output/dense/BiasAdd" [label="[]", style=solid]; -"1347 bert/encoder/layer_10/attention/output/dense/BiasAdd" -> "1348 bert/encoder/layer_10/attention/output/add" [label="[]", style=solid]; -"1348 bert/encoder/layer_10/attention/output/add" -> "1349 bert/encoder/layer_10/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"1348 bert/encoder/layer_10/attention/output/add" -> "1351 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1348 bert/encoder/layer_10/attention/output/add" -> "1360 
bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1349 bert/encoder/layer_10/attention/output/LayerNorm/moments/mean" -> "1350 bert/encoder/layer_10/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"1349 bert/encoder/layer_10/attention/output/LayerNorm/moments/mean" -> "1358 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1350 bert/encoder/layer_10/attention/output/LayerNorm/moments/StopGradient" -> "1351 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1351 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference" -> "1352 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference__449" [label="[]", style=solid]; -"1352 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference__449" -> "1353 bert/encoder/layer_10/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"1353 bert/encoder/layer_10/attention/output/LayerNorm/moments/variance" -> "1354 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"1354 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add" -> "1355 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"1355 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/Rsqrt" -> "1356 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/Rsqrt__451" [label="[]", style=solid]; -"1356 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/Rsqrt__451" -> "1357 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"1357 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul" -> "1358 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1357 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul" -> "1360 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1358 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_2" -> "1359 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"1359 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/sub" -> "1361 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1360 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_1" -> "1361 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1361 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1" -> "1362 QuantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"1361 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1" -> "1382 bert/encoder/layer_10/output/add" [label="[]", style=solid]; -"1362 QuantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1363 DequantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"1363 DequantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1366 bert/encoder/layer_10/intermediate/dense/MatMul" [label="[]", style=solid]; -"1364 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel^0_1" -> "1365 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; -"1365 
DequantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel^0_1" -> "1366 bert/encoder/layer_10/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; -"1366 bert/encoder/layer_10/intermediate/dense/MatMul" -> "1367 bert/encoder/layer_10/intermediate/dense/BiasAdd" [label="[]", style=solid]; -"1367 bert/encoder/layer_10/intermediate/dense/BiasAdd" -> "1368 bert/encoder/layer_10/intermediate/dense/Pow" [label="[]", style=solid]; -"1367 bert/encoder/layer_10/intermediate/dense/BiasAdd" -> "1370 bert/encoder/layer_10/intermediate/dense/add" [label="[]", style=solid]; -"1367 bert/encoder/layer_10/intermediate/dense/BiasAdd" -> "1375 bert/encoder/layer_10/intermediate/dense/mul_3" [label="[]", style=solid]; -"1368 bert/encoder/layer_10/intermediate/dense/Pow" -> "1369 bert/encoder/layer_10/intermediate/dense/mul" [label="[]", style=solid]; -"1369 bert/encoder/layer_10/intermediate/dense/mul" -> "1370 bert/encoder/layer_10/intermediate/dense/add" [label="[]", style=solid]; -"1370 bert/encoder/layer_10/intermediate/dense/add" -> "1371 bert/encoder/layer_10/intermediate/dense/mul_1" [label="[]", style=solid]; -"1371 bert/encoder/layer_10/intermediate/dense/mul_1" -> "1372 bert/encoder/layer_10/intermediate/dense/Tanh" [label="[]", style=solid]; -"1372 bert/encoder/layer_10/intermediate/dense/Tanh" -> "1373 bert/encoder/layer_10/intermediate/dense/add_1" [label="[]", style=solid]; -"1373 bert/encoder/layer_10/intermediate/dense/add_1" -> "1374 bert/encoder/layer_10/intermediate/dense/mul_2" [label="[]", style=solid]; -"1374 bert/encoder/layer_10/intermediate/dense/mul_2" -> "1375 bert/encoder/layer_10/intermediate/dense/mul_3" [label="[]", style=solid]; -"1375 bert/encoder/layer_10/intermediate/dense/mul_3" -> "1376 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; -"1376 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3^0_1" -> "1377 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; -"1377 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3^0_1" -> "1380 bert/encoder/layer_10/output/dense/MatMul" [label="[]", style=solid]; -"1378 QuantizeLinear_bert/encoder/layer_10/output/dense/kernel^0_1" -> "1379 DequantizeLinear_bert/encoder/layer_10/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; -"1379 DequantizeLinear_bert/encoder/layer_10/output/dense/kernel^0_1" -> "1380 bert/encoder/layer_10/output/dense/MatMul" [label="[3072, 768]", style=solid]; -"1380 bert/encoder/layer_10/output/dense/MatMul" -> "1381 bert/encoder/layer_10/output/dense/BiasAdd" [label="[]", style=solid]; -"1381 bert/encoder/layer_10/output/dense/BiasAdd" -> "1382 bert/encoder/layer_10/output/add" [label="[]", style=solid]; -"1382 bert/encoder/layer_10/output/add" -> "1383 bert/encoder/layer_10/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"1382 bert/encoder/layer_10/output/add" -> "1385 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1382 bert/encoder/layer_10/output/add" -> "1394 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1383 bert/encoder/layer_10/output/LayerNorm/moments/mean" -> "1384 bert/encoder/layer_10/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"1383 bert/encoder/layer_10/output/LayerNorm/moments/mean" -> "1392 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1384 bert/encoder/layer_10/output/LayerNorm/moments/StopGradient" -> 
"1385 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1385 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference" -> "1386 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference__453" [label="[]", style=solid]; -"1386 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference__453" -> "1387 bert/encoder/layer_10/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"1387 bert/encoder/layer_10/output/LayerNorm/moments/variance" -> "1388 bert/encoder/layer_10/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"1388 bert/encoder/layer_10/output/LayerNorm/batchnorm/add" -> "1389 bert/encoder/layer_10/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"1389 bert/encoder/layer_10/output/LayerNorm/batchnorm/Rsqrt" -> "1390 bert/encoder/layer_10/output/LayerNorm/batchnorm/Rsqrt__455" [label="[]", style=solid]; -"1390 bert/encoder/layer_10/output/LayerNorm/batchnorm/Rsqrt__455" -> "1391 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"1391 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul" -> "1392 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1391 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul" -> "1394 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1392 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_2" -> "1393 bert/encoder/layer_10/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"1393 bert/encoder/layer_10/output/LayerNorm/batchnorm/sub" -> "1395 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1394 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_1" -> "1395 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1395 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" -> "1396 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"1395 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" -> "1400 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; -"1395 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" -> "1402 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; -"1395 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" -> "1438 bert/encoder/layer_11/attention/output/add" [label="[]", style=solid]; -"1396 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_1" -> "1397 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"1397 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_1" -> "1404 bert/encoder/layer_11/attention/self/value/MatMul" [label="[]", style=solid]; -"1398 QuantizeLinear_bert/encoder/layer_11/attention/self/value/kernel^0_1" -> "1399 DequantizeLinear_bert/encoder/layer_11/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; -"1399 DequantizeLinear_bert/encoder/layer_11/attention/self/value/kernel^0_1" -> "1404 bert/encoder/layer_11/attention/self/value/MatMul" [label="[768, 768]", style=solid]; -"1400 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_2" -> "1401 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; -"1401 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_2" -> "1410 
bert/encoder/layer_11/attention/self/query/MatMul" [label="[]", style=solid]; -"1402 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_3" -> "1403 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; -"1403 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_3" -> "1418 bert/encoder/layer_11/attention/self/key/MatMul" [label="[]", style=solid]; -"1404 bert/encoder/layer_11/attention/self/value/MatMul" -> "1405 bert/encoder/layer_11/attention/self/value/BiasAdd" [label="[]", style=solid]; -"1405 bert/encoder/layer_11/attention/self/value/BiasAdd" -> "1406 bert/encoder/layer_11/attention/self/Reshape_2" [label="[]", style=solid]; -"1406 bert/encoder/layer_11/attention/self/Reshape_2" -> "1407 bert/encoder/layer_11/attention/self/transpose_2" [label="[]", style=solid]; -"1407 bert/encoder/layer_11/attention/self/transpose_2" -> "1429 bert/encoder/layer_11/attention/self/MatMul_1" [label="[]", style=solid]; -"1408 QuantizeLinear_bert/encoder/layer_11/attention/self/query/kernel^0_1" -> "1409 DequantizeLinear_bert/encoder/layer_11/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; -"1409 DequantizeLinear_bert/encoder/layer_11/attention/self/query/kernel^0_1" -> "1410 bert/encoder/layer_11/attention/self/query/MatMul" [label="[768, 768]", style=solid]; -"1410 bert/encoder/layer_11/attention/self/query/MatMul" -> "1411 bert/encoder/layer_11/attention/self/query/BiasAdd" [label="[]", style=solid]; -"1411 bert/encoder/layer_11/attention/self/query/BiasAdd" -> "1412 QuantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; -"1412 QuantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd^0_1" -> "1413 DequantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; -"1413 DequantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd^0_1" -> "1414 bert/encoder/layer_11/attention/self/Reshape" [label="[]", style=solid]; -"1414 bert/encoder/layer_11/attention/self/Reshape" -> "1415 bert/encoder/layer_11/attention/self/transpose" [label="[]", style=solid]; -"1415 bert/encoder/layer_11/attention/self/transpose" -> "1425 bert/encoder/layer_11/attention/self/MatMul" [label="[]", style=solid]; -"1416 QuantizeLinear_bert/encoder/layer_11/attention/self/key/kernel^0_1" -> "1417 DequantizeLinear_bert/encoder/layer_11/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; -"1417 DequantizeLinear_bert/encoder/layer_11/attention/self/key/kernel^0_1" -> "1418 bert/encoder/layer_11/attention/self/key/MatMul" [label="[768, 768]", style=solid]; -"1418 bert/encoder/layer_11/attention/self/key/MatMul" -> "1419 bert/encoder/layer_11/attention/self/key/BiasAdd" [label="[]", style=solid]; -"1419 bert/encoder/layer_11/attention/self/key/BiasAdd" -> "1420 QuantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; -"1420 QuantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd^0_1" -> "1421 DequantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; -"1421 DequantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd^0_1" -> "1422 bert/encoder/layer_11/attention/self/Reshape_1" [label="[]", style=solid]; -"1422 bert/encoder/layer_11/attention/self/Reshape_1" -> "1423 bert/encoder/layer_11/attention/self/transpose_1" [label="[]", style=solid]; -"1423 bert/encoder/layer_11/attention/self/transpose_1" -> "1424 
bert/encoder/layer_11/attention/self/MatMul__460" [label="[]", style=solid]; -"1424 bert/encoder/layer_11/attention/self/MatMul__460" -> "1425 bert/encoder/layer_11/attention/self/MatMul" [label="[]", style=solid]; -"1425 bert/encoder/layer_11/attention/self/MatMul" -> "1426 bert/encoder/layer_11/attention/self/Mul" [label="[]", style=solid]; -"1426 bert/encoder/layer_11/attention/self/Mul" -> "1427 bert/encoder/layer_11/attention/self/add" [label="[]", style=solid]; -"1427 bert/encoder/layer_11/attention/self/add" -> "1428 bert/encoder/layer_11/attention/self/Softmax" [label="[]", style=solid]; -"1428 bert/encoder/layer_11/attention/self/Softmax" -> "1429 bert/encoder/layer_11/attention/self/MatMul_1" [label="[]", style=solid]; -"1429 bert/encoder/layer_11/attention/self/MatMul_1" -> "1430 QuantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1^0_1" [label="[]", style=solid]; -"1430 QuantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1^0_1" -> "1431 DequantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; -"1431 DequantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1^0_1" -> "1432 bert/encoder/layer_11/attention/self/transpose_3" [label="[]", style=solid]; -"1432 bert/encoder/layer_11/attention/self/transpose_3" -> "1433 bert/encoder/layer_11/attention/self/Reshape_3" [label="[]", style=solid]; -"1433 bert/encoder/layer_11/attention/self/Reshape_3" -> "1436 bert/encoder/layer_11/attention/output/dense/MatMul" [label="[]", style=solid]; -"1434 QuantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel^0_1" -> "1435 DequantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; -"1435 DequantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel^0_1" -> "1436 bert/encoder/layer_11/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; -"1436 bert/encoder/layer_11/attention/output/dense/MatMul" -> "1437 bert/encoder/layer_11/attention/output/dense/BiasAdd" [label="[]", style=solid]; -"1437 bert/encoder/layer_11/attention/output/dense/BiasAdd" -> "1438 bert/encoder/layer_11/attention/output/add" [label="[]", style=solid]; -"1438 bert/encoder/layer_11/attention/output/add" -> "1439 bert/encoder/layer_11/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"1438 bert/encoder/layer_11/attention/output/add" -> "1441 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1438 bert/encoder/layer_11/attention/output/add" -> "1450 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1439 bert/encoder/layer_11/attention/output/LayerNorm/moments/mean" -> "1440 bert/encoder/layer_11/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"1439 bert/encoder/layer_11/attention/output/LayerNorm/moments/mean" -> "1448 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1440 bert/encoder/layer_11/attention/output/LayerNorm/moments/StopGradient" -> "1441 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1441 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference" -> "1442 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference__463" [label="[]", style=solid]; -"1442 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference__463" -> "1443 bert/encoder/layer_11/attention/output/LayerNorm/moments/variance" 
[label="[]", style=solid]; -"1443 bert/encoder/layer_11/attention/output/LayerNorm/moments/variance" -> "1444 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"1444 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add" -> "1445 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"1445 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/Rsqrt" -> "1446 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/Rsqrt__465" [label="[]", style=solid]; -"1446 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/Rsqrt__465" -> "1447 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"1447 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul" -> "1448 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1447 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul" -> "1450 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1448 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_2" -> "1449 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"1449 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/sub" -> "1451 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1450 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_1" -> "1451 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1451 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1" -> "1452 QuantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"1451 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1" -> "1472 bert/encoder/layer_11/output/add" [label="[]", style=solid]; -"1452 QuantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1453 DequantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"1453 DequantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1456 bert/encoder/layer_11/intermediate/dense/MatMul" [label="[]", style=solid]; -"1454 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel^0_1" -> "1455 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; -"1455 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel^0_1" -> "1456 bert/encoder/layer_11/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; -"1456 bert/encoder/layer_11/intermediate/dense/MatMul" -> "1457 bert/encoder/layer_11/intermediate/dense/BiasAdd" [label="[]", style=solid]; -"1457 bert/encoder/layer_11/intermediate/dense/BiasAdd" -> "1458 bert/encoder/layer_11/intermediate/dense/Pow" [label="[]", style=solid]; -"1457 bert/encoder/layer_11/intermediate/dense/BiasAdd" -> "1460 bert/encoder/layer_11/intermediate/dense/add" [label="[]", style=solid]; -"1457 bert/encoder/layer_11/intermediate/dense/BiasAdd" -> "1465 bert/encoder/layer_11/intermediate/dense/mul_3" [label="[]", style=solid]; -"1458 bert/encoder/layer_11/intermediate/dense/Pow" -> "1459 bert/encoder/layer_11/intermediate/dense/mul" [label="[]", style=solid]; -"1459 bert/encoder/layer_11/intermediate/dense/mul" -> "1460 bert/encoder/layer_11/intermediate/dense/add" [label="[]", style=solid]; -"1460 
bert/encoder/layer_11/intermediate/dense/add" -> "1461 bert/encoder/layer_11/intermediate/dense/mul_1" [label="[]", style=solid]; -"1461 bert/encoder/layer_11/intermediate/dense/mul_1" -> "1462 bert/encoder/layer_11/intermediate/dense/Tanh" [label="[]", style=solid]; -"1462 bert/encoder/layer_11/intermediate/dense/Tanh" -> "1463 bert/encoder/layer_11/intermediate/dense/add_1" [label="[]", style=solid]; -"1463 bert/encoder/layer_11/intermediate/dense/add_1" -> "1464 bert/encoder/layer_11/intermediate/dense/mul_2" [label="[]", style=solid]; -"1464 bert/encoder/layer_11/intermediate/dense/mul_2" -> "1465 bert/encoder/layer_11/intermediate/dense/mul_3" [label="[]", style=solid]; -"1465 bert/encoder/layer_11/intermediate/dense/mul_3" -> "1466 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; -"1466 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3^0_1" -> "1467 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; -"1467 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3^0_1" -> "1470 bert/encoder/layer_11/output/dense/MatMul" [label="[]", style=solid]; -"1468 QuantizeLinear_bert/encoder/layer_11/output/dense/kernel^0_1" -> "1469 DequantizeLinear_bert/encoder/layer_11/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; -"1469 DequantizeLinear_bert/encoder/layer_11/output/dense/kernel^0_1" -> "1470 bert/encoder/layer_11/output/dense/MatMul" [label="[3072, 768]", style=solid]; -"1470 bert/encoder/layer_11/output/dense/MatMul" -> "1471 bert/encoder/layer_11/output/dense/BiasAdd" [label="[]", style=solid]; -"1471 bert/encoder/layer_11/output/dense/BiasAdd" -> "1472 bert/encoder/layer_11/output/add" [label="[]", style=solid]; -"1472 bert/encoder/layer_11/output/add" -> "1473 bert/encoder/layer_11/output/LayerNorm/moments/mean" [label="[]", style=solid]; -"1472 bert/encoder/layer_11/output/add" -> "1475 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1472 bert/encoder/layer_11/output/add" -> "1484 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1473 bert/encoder/layer_11/output/LayerNorm/moments/mean" -> "1474 bert/encoder/layer_11/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; -"1473 bert/encoder/layer_11/output/LayerNorm/moments/mean" -> "1482 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1474 bert/encoder/layer_11/output/LayerNorm/moments/StopGradient" -> "1475 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; -"1475 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference" -> "1476 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference__467" [label="[]", style=solid]; -"1476 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference__467" -> "1477 bert/encoder/layer_11/output/LayerNorm/moments/variance" [label="[]", style=solid]; -"1477 bert/encoder/layer_11/output/LayerNorm/moments/variance" -> "1478 bert/encoder/layer_11/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; -"1478 bert/encoder/layer_11/output/LayerNorm/batchnorm/add" -> "1479 bert/encoder/layer_11/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; -"1479 bert/encoder/layer_11/output/LayerNorm/batchnorm/Rsqrt" -> "1480 bert/encoder/layer_11/output/LayerNorm/batchnorm/Rsqrt__469" [label="[]", style=solid]; -"1480 bert/encoder/layer_11/output/LayerNorm/batchnorm/Rsqrt__469" -> "1481 
bert/encoder/layer_11/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; -"1481 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul" -> "1482 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; -"1481 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul" -> "1484 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; -"1482 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_2" -> "1483 bert/encoder/layer_11/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; -"1483 bert/encoder/layer_11/output/LayerNorm/batchnorm/sub" -> "1485 bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1484 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_1" -> "1485 bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; -"1485 bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1" -> "1486 QuantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; -"1486 QuantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1^0_1" -> "1487 DequantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; -"1487 DequantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1^0_1" -> "1488 bert/encoder/Reshape_13" [label="[]", style=solid]; -"1488 bert/encoder/Reshape_13" -> "1489 Shape_1" [label="[]", style=solid]; -"1488 bert/encoder/Reshape_13" -> "1501 Reshape" [label="[]", style=solid]; -"1489 Shape_1" -> "1490 Shape_1__472" [label="[-1]", style=dashed]; -"1490 Shape_1__472" -> "1491 strided_slice_1" [label="[-1]", style=solid]; -"1491 strided_slice_1" -> "1492 strided_slice_1__476" [label="[-1]", style=solid]; -"1492 strided_slice_1__476" -> "1493 strided_slice_1__477" [label="[]", style=solid]; -"1493 strided_slice_1__477" -> "1494 mul" [label="[]", style=dashed]; -"1493 strided_slice_1__477" -> "1498 Reshape_1/shape_Unsqueeze__478" [label="[]", style=dashed]; -"1494 mul" -> "1495 Reshape/shape_Unsqueeze__482" [label="[]", style=dashed]; -"1495 Reshape/shape_Unsqueeze__482" -> "1496 Reshape/shape_Concat__484" [label="[1]", style=dashed]; -"1496 Reshape/shape_Concat__484" -> "1497 Reshape__485" [label="[2]", style=dashed]; -"1497 Reshape__485" -> "1501 Reshape" [label="[2]", style=dashed]; -"1498 Reshape_1/shape_Unsqueeze__478" -> "1499 Reshape_1/shape_Concat__481" [label="[1]", style=dashed]; -"1499 Reshape_1/shape_Concat__481" -> "1500 Reshape_1__487" [label="[3]", style=dashed]; -"1500 Reshape_1__487" -> "1506 Reshape_1" [label="[3]", style=dashed]; -"1501 Reshape" -> "1504 MatMul" [label="[]", style=solid]; -"1502 QuantizeLinear_MatMul__486^0_1" -> "1503 DequantizeLinear_MatMul__486^0_1" [label="[768, 2]", style=dashed]; -"1503 DequantizeLinear_MatMul__486^0_1" -> "1504 MatMul" [label="[768, 2]", style=solid]; -"1504 MatMul" -> "1505 BiasAdd" [label="[]", style=solid]; -"1505 BiasAdd" -> "1506 Reshape_1" [label="[]", style=solid]; -"1506 Reshape_1" -> "1507 transpose" [label="[]", style=solid]; -"1507 transpose" -> "1508 unstack" [label="[]", style=solid]; -"1508 unstack" -> "1509 unstack__490" [label="[]", style=solid]; -"1508 unstack" -> "1511 unstack__488" [label="[]", style=solid]; -"1509 unstack__490" -> "1510 unstack_graph_outputs_Identity__4" [label="[]", style=solid]; -"1510 unstack_graph_outputs_Identity__4" -> "1517 nncf_model_output_0" [label="[-1, 256]", style=solid]; -"1511 unstack__488" -> "1512 unstack_graph_outputs_Identity__7" [label="[]", style=solid]; -"1512 
unstack_graph_outputs_Identity__7" -> "1518 nncf_model_output_1" [label="[-1, 256]", style=solid]; -"1513 nncf_model_input_0" -> "0 unique_ids_graph_outputs_Identity__10" [label="[-1]", style=dashed]; -"1514 nncf_model_input_1" -> "185 bert/embeddings/Reshape_2" [label="[-1, 256]", style=dashed]; -"1515 nncf_model_input_2" -> "140 bert/encoder/Reshape" [label="[-1, 256]", style=dashed]; -"1516 nncf_model_input_3" -> "123 bert/encoder/Shape" [label="[-1, 256]", style=dashed]; -"1516 nncf_model_input_3" -> "189 bert/embeddings/ExpandDims" [label="[-1, 256]", style=dashed]; +"1 Constant_nncf_1" [id=1, type=Constant]; +"2 bert/encoder/ones/packed_Unsqueeze__20" [id=2, type=Unsqueeze]; +"3 Constant_nncf_3" [id=3, type=Constant]; +"4 bert/encoder/ones/packed_Unsqueeze__19" [id=4, type=Unsqueeze]; +"5 Constant_nncf_5" [id=5, type=Constant]; +"6 bert/encoder/layer_9/attention/self/Reshape_3/shape_Unsqueeze__83" [id=6, type=Unsqueeze]; +"7 Constant_nncf_7" [id=7, type=Constant]; +"8 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__88" [id=8, type=Unsqueeze]; +"9 Constant_nncf_9" [id=9, type=Constant]; +"10 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__87" [id=10, type=Unsqueeze]; +"11 Constant_nncf_11" [id=11, type=Constant]; +"12 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__86" [id=12, type=Unsqueeze]; +"13 Constant_nncf_13" [id=13, type=Constant]; +"14 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__93" [id=14, type=Unsqueeze]; +"15 Constant_nncf_15" [id=15, type=Constant]; +"16 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__92" [id=16, type=Unsqueeze]; +"17 Constant_nncf_17" [id=17, type=Constant]; +"18 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__91" [id=18, type=Unsqueeze]; +"19 Constant_nncf_19" [id=19, type=Constant]; +"20 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__98" [id=20, type=Unsqueeze]; +"21 Constant_nncf_21" [id=21, type=Constant]; +"22 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__97" [id=22, type=Unsqueeze]; +"23 Constant_nncf_23" [id=23, type=Constant]; +"24 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__96" [id=24, type=Unsqueeze]; +"25 Constant_nncf_25" [id=25, type=Constant]; +"26 bert/encoder/layer_8/attention/self/Reshape_3/shape_Unsqueeze__101" [id=26, type=Unsqueeze]; +"27 Constant_nncf_27" [id=27, type=Constant]; +"28 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__106" [id=28, type=Unsqueeze]; +"29 Constant_nncf_29" [id=29, type=Constant]; +"30 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__105" [id=30, type=Unsqueeze]; +"31 Constant_nncf_31" [id=31, type=Constant]; +"32 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__104" [id=32, type=Unsqueeze]; +"33 Constant_nncf_33" [id=33, type=Constant]; +"34 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__111" [id=34, type=Unsqueeze]; +"35 Constant_nncf_35" [id=35, type=Constant]; +"36 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__110" [id=36, type=Unsqueeze]; +"37 Constant_nncf_37" [id=37, type=Constant]; +"38 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__109" [id=38, type=Unsqueeze]; +"39 Constant_nncf_39" [id=39, type=Constant]; +"40 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__116" [id=40, type=Unsqueeze]; +"41 Constant_nncf_41" [id=41, type=Constant]; +"42 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__115" [id=42, type=Unsqueeze]; +"43 Constant_nncf_43" [id=43, 
type=Constant]; +"44 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__114" [id=44, type=Unsqueeze]; +"45 Constant_nncf_45" [id=45, type=Constant]; +"46 bert/encoder/layer_7/attention/self/Reshape_3/shape_Unsqueeze__119" [id=46, type=Unsqueeze]; +"47 Constant_nncf_47" [id=47, type=Constant]; +"48 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__124" [id=48, type=Unsqueeze]; +"49 Constant_nncf_49" [id=49, type=Constant]; +"50 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__123" [id=50, type=Unsqueeze]; +"51 Constant_nncf_51" [id=51, type=Constant]; +"52 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__122" [id=52, type=Unsqueeze]; +"53 Constant_nncf_53" [id=53, type=Constant]; +"54 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__129" [id=54, type=Unsqueeze]; +"55 Constant_nncf_55" [id=55, type=Constant]; +"56 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__128" [id=56, type=Unsqueeze]; +"57 Constant_nncf_57" [id=57, type=Constant]; +"58 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__127" [id=58, type=Unsqueeze]; +"59 Constant_nncf_59" [id=59, type=Constant]; +"60 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__134" [id=60, type=Unsqueeze]; +"61 Constant_nncf_61" [id=61, type=Constant]; +"62 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__133" [id=62, type=Unsqueeze]; +"63 Constant_nncf_63" [id=63, type=Constant]; +"64 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__132" [id=64, type=Unsqueeze]; +"65 Constant_nncf_65" [id=65, type=Constant]; +"66 bert/encoder/layer_6/attention/self/Reshape_3/shape_Unsqueeze__137" [id=66, type=Unsqueeze]; +"67 Constant_nncf_67" [id=67, type=Constant]; +"68 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__142" [id=68, type=Unsqueeze]; +"69 Constant_nncf_69" [id=69, type=Constant]; +"70 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__141" [id=70, type=Unsqueeze]; +"71 Constant_nncf_71" [id=71, type=Constant]; +"72 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__140" [id=72, type=Unsqueeze]; +"73 Constant_nncf_73" [id=73, type=Constant]; +"74 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__147" [id=74, type=Unsqueeze]; +"75 Constant_nncf_75" [id=75, type=Constant]; +"76 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__146" [id=76, type=Unsqueeze]; +"77 Constant_nncf_77" [id=77, type=Constant]; +"78 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__145" [id=78, type=Unsqueeze]; +"79 Constant_nncf_79" [id=79, type=Constant]; +"80 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__152" [id=80, type=Unsqueeze]; +"81 Constant_nncf_81" [id=81, type=Constant]; +"82 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__151" [id=82, type=Unsqueeze]; +"83 Constant_nncf_83" [id=83, type=Constant]; +"84 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__150" [id=84, type=Unsqueeze]; +"85 Constant_nncf_85" [id=85, type=Constant]; +"86 bert/encoder/layer_5/attention/self/Reshape_3/shape_Unsqueeze__155" [id=86, type=Unsqueeze]; +"87 Constant_nncf_87" [id=87, type=Constant]; +"88 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__160" [id=88, type=Unsqueeze]; +"89 Constant_nncf_89" [id=89, type=Constant]; +"90 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__159" [id=90, type=Unsqueeze]; +"91 Constant_nncf_91" [id=91, type=Constant]; +"92 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__158" 
[id=92, type=Unsqueeze]; +"93 Constant_nncf_93" [id=93, type=Constant]; +"94 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__165" [id=94, type=Unsqueeze]; +"95 Constant_nncf_95" [id=95, type=Constant]; +"96 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__164" [id=96, type=Unsqueeze]; +"97 Constant_nncf_97" [id=97, type=Constant]; +"98 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__163" [id=98, type=Unsqueeze]; +"99 Constant_nncf_99" [id=99, type=Constant]; +"100 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__170" [id=100, type=Unsqueeze]; +"101 Constant_nncf_101" [id=101, type=Constant]; +"102 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__169" [id=102, type=Unsqueeze]; +"103 Constant_nncf_103" [id=103, type=Constant]; +"104 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__168" [id=104, type=Unsqueeze]; +"105 Constant_nncf_105" [id=105, type=Constant]; +"106 bert/encoder/layer_4/attention/self/Reshape_3/shape_Unsqueeze__173" [id=106, type=Unsqueeze]; +"107 Constant_nncf_107" [id=107, type=Constant]; +"108 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__178" [id=108, type=Unsqueeze]; +"109 Constant_nncf_109" [id=109, type=Constant]; +"110 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__177" [id=110, type=Unsqueeze]; +"111 Constant_nncf_111" [id=111, type=Constant]; +"112 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__176" [id=112, type=Unsqueeze]; +"113 Constant_nncf_113" [id=113, type=Constant]; +"114 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__183" [id=114, type=Unsqueeze]; +"115 Constant_nncf_115" [id=115, type=Constant]; +"116 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__182" [id=116, type=Unsqueeze]; +"117 Constant_nncf_117" [id=117, type=Constant]; +"118 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__181" [id=118, type=Unsqueeze]; +"119 Constant_nncf_119" [id=119, type=Constant]; +"120 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__188" [id=120, type=Unsqueeze]; +"121 Constant_nncf_121" [id=121, type=Constant]; +"122 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__187" [id=122, type=Unsqueeze]; +"123 Constant_nncf_123" [id=123, type=Constant]; +"124 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__186" [id=124, type=Unsqueeze]; +"125 Constant_nncf_125" [id=125, type=Constant]; +"126 bert/encoder/layer_3/attention/self/Reshape_3/shape_Unsqueeze__191" [id=126, type=Unsqueeze]; +"127 Constant_nncf_127" [id=127, type=Constant]; +"128 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__196" [id=128, type=Unsqueeze]; +"129 Constant_nncf_129" [id=129, type=Constant]; +"130 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__195" [id=130, type=Unsqueeze]; +"131 Constant_nncf_131" [id=131, type=Constant]; +"132 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__194" [id=132, type=Unsqueeze]; +"133 Constant_nncf_133" [id=133, type=Constant]; +"134 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__201" [id=134, type=Unsqueeze]; +"135 Constant_nncf_135" [id=135, type=Constant]; +"136 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__200" [id=136, type=Unsqueeze]; +"137 Constant_nncf_137" [id=137, type=Constant]; +"138 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__199" [id=138, type=Unsqueeze]; +"139 Constant_nncf_139" [id=139, type=Constant]; +"140 
bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__206" [id=140, type=Unsqueeze]; +"141 Constant_nncf_141" [id=141, type=Constant]; +"142 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__205" [id=142, type=Unsqueeze]; +"143 Constant_nncf_143" [id=143, type=Constant]; +"144 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__204" [id=144, type=Unsqueeze]; +"145 Constant_nncf_145" [id=145, type=Constant]; +"146 bert/encoder/layer_2/attention/self/Reshape_3/shape_Unsqueeze__209" [id=146, type=Unsqueeze]; +"147 Constant_nncf_147" [id=147, type=Constant]; +"148 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__214" [id=148, type=Unsqueeze]; +"149 Constant_nncf_149" [id=149, type=Constant]; +"150 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__213" [id=150, type=Unsqueeze]; +"151 Constant_nncf_151" [id=151, type=Constant]; +"152 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__212" [id=152, type=Unsqueeze]; +"153 Constant_nncf_153" [id=153, type=Constant]; +"154 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__219" [id=154, type=Unsqueeze]; +"155 Constant_nncf_155" [id=155, type=Constant]; +"156 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__218" [id=156, type=Unsqueeze]; +"157 Constant_nncf_157" [id=157, type=Constant]; +"158 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__217" [id=158, type=Unsqueeze]; +"159 Constant_nncf_159" [id=159, type=Constant]; +"160 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__224" [id=160, type=Unsqueeze]; +"161 Constant_nncf_161" [id=161, type=Constant]; +"162 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__223" [id=162, type=Unsqueeze]; +"163 Constant_nncf_163" [id=163, type=Constant]; +"164 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__222" [id=164, type=Unsqueeze]; +"165 Constant_nncf_165" [id=165, type=Constant]; +"166 bert/encoder/layer_11/attention/self/Reshape_3/shape_Unsqueeze__227" [id=166, type=Unsqueeze]; +"167 Constant_nncf_167" [id=167, type=Constant]; +"168 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__232" [id=168, type=Unsqueeze]; +"169 Constant_nncf_169" [id=169, type=Constant]; +"170 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__231" [id=170, type=Unsqueeze]; +"171 Constant_nncf_171" [id=171, type=Constant]; +"172 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__230" [id=172, type=Unsqueeze]; +"173 Constant_nncf_173" [id=173, type=Constant]; +"174 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__237" [id=174, type=Unsqueeze]; +"175 Constant_nncf_175" [id=175, type=Constant]; +"176 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__236" [id=176, type=Unsqueeze]; +"177 Constant_nncf_177" [id=177, type=Constant]; +"178 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__235" [id=178, type=Unsqueeze]; +"179 Constant_nncf_179" [id=179, type=Constant]; +"180 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__242" [id=180, type=Unsqueeze]; +"181 Constant_nncf_181" [id=181, type=Constant]; +"182 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__241" [id=182, type=Unsqueeze]; +"183 Constant_nncf_183" [id=183, type=Constant]; +"184 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__240" [id=184, type=Unsqueeze]; +"185 Constant_nncf_185" [id=185, type=Constant]; +"186 bert/encoder/layer_10/attention/self/Reshape_3/shape_Unsqueeze__245" [id=186, type=Unsqueeze]; +"187 
Constant_nncf_187" [id=187, type=Constant]; +"188 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__250" [id=188, type=Unsqueeze]; +"189 Constant_nncf_189" [id=189, type=Constant]; +"190 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__249" [id=190, type=Unsqueeze]; +"191 Constant_nncf_191" [id=191, type=Constant]; +"192 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__248" [id=192, type=Unsqueeze]; +"193 Constant_nncf_193" [id=193, type=Constant]; +"194 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__255" [id=194, type=Unsqueeze]; +"195 Constant_nncf_195" [id=195, type=Constant]; +"196 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__254" [id=196, type=Unsqueeze]; +"197 Constant_nncf_197" [id=197, type=Constant]; +"198 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__253" [id=198, type=Unsqueeze]; +"199 Constant_nncf_199" [id=199, type=Constant]; +"200 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__260" [id=200, type=Unsqueeze]; +"201 Constant_nncf_201" [id=201, type=Constant]; +"202 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__259" [id=202, type=Unsqueeze]; +"203 Constant_nncf_203" [id=203, type=Constant]; +"204 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__258" [id=204, type=Unsqueeze]; +"205 Constant_nncf_205" [id=205, type=Constant]; +"206 bert/encoder/layer_1/attention/self/Reshape_3/shape_Unsqueeze__263" [id=206, type=Unsqueeze]; +"207 Constant_nncf_207" [id=207, type=Constant]; +"208 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__268" [id=208, type=Unsqueeze]; +"209 Constant_nncf_209" [id=209, type=Constant]; +"210 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__267" [id=210, type=Unsqueeze]; +"211 Constant_nncf_211" [id=211, type=Constant]; +"212 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__266" [id=212, type=Unsqueeze]; +"213 Constant_nncf_213" [id=213, type=Constant]; +"214 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__273" [id=214, type=Unsqueeze]; +"215 Constant_nncf_215" [id=215, type=Constant]; +"216 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__272" [id=216, type=Unsqueeze]; +"217 Constant_nncf_217" [id=217, type=Constant]; +"218 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__271" [id=218, type=Unsqueeze]; +"219 Constant_nncf_219" [id=219, type=Constant]; +"220 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__278" [id=220, type=Unsqueeze]; +"221 Constant_nncf_221" [id=221, type=Constant]; +"222 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__277" [id=222, type=Unsqueeze]; +"223 Constant_nncf_223" [id=223, type=Constant]; +"224 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__276" [id=224, type=Unsqueeze]; +"225 Constant_nncf_225" [id=225, type=Constant]; +"226 bert/encoder/layer_0/attention/self/Reshape_3/shape_Unsqueeze__281" [id=226, type=Unsqueeze]; +"227 Constant_nncf_227" [id=227, type=Constant]; +"228 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__286" [id=228, type=Unsqueeze]; +"229 Constant_nncf_229" [id=229, type=Constant]; +"230 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__285" [id=230, type=Unsqueeze]; +"231 Constant_nncf_231" [id=231, type=Constant]; +"232 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__284" [id=232, type=Unsqueeze]; +"233 Constant_nncf_233" [id=233, type=Constant]; +"234 
bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__291" [id=234, type=Unsqueeze]; +"235 Constant_nncf_235" [id=235, type=Constant]; +"236 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__290" [id=236, type=Unsqueeze]; +"237 Constant_nncf_237" [id=237, type=Constant]; +"238 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__289" [id=238, type=Unsqueeze]; +"239 Constant_nncf_239" [id=239, type=Constant]; +"240 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__296" [id=240, type=Unsqueeze]; +"241 Constant_nncf_241" [id=241, type=Constant]; +"242 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__295" [id=242, type=Unsqueeze]; +"243 Constant_nncf_243" [id=243, type=Constant]; +"244 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__294" [id=244, type=Unsqueeze]; +"245 bert/encoder/Shape" [id=245, type=Shape]; +"246 bert/encoder/Shape__12" [id=246, type=Cast]; +"247 bert/encoder/strided_slice" [id=247, type=Slice]; +"248 Constant_nncf_248" [id=248, type=Constant]; +"249 bert/encoder/strided_slice__16" [id=249, type=Squeeze]; +"250 bert/encoder/strided_slice__17" [id=250, type=Cast]; +"251 Constant_nncf_251" [id=251, type=Constant]; +"252 bert/encoder/ones/packed_Unsqueeze__18" [id=252, type=Unsqueeze]; +"253 bert/encoder/ones/packed_Concat__21" [id=253, type=Concat]; +"254 bert/encoder/ones__22" [id=254, type=Cast]; +"255 bert/encoder/ones" [id=255, type=ConstantOfShape]; +"256 Constant_nncf_256" [id=256, type=Constant]; +"257 bert/encoder/Reshape_13/shape_Unsqueeze__300" [id=257, type=Unsqueeze]; +"258 Constant_nncf_258" [id=258, type=Constant]; +"259 bert/encoder/Reshape_13/shape_Unsqueeze__299" [id=259, type=Unsqueeze]; +"260 bert/encoder/Reshape_1__302" [id=260, type=Cast]; +"261 Constant_nncf_261" [id=261, type=Constant]; +"262 bert/encoder/Reshape/shape_Unsqueeze__23" [id=262, type=Unsqueeze]; +"263 Constant_nncf_263" [id=263, type=Constant]; +"264 bert/encoder/Reshape/shape_Unsqueeze__25" [id=264, type=Unsqueeze]; +"265 Constant_nncf_265" [id=265, type=Constant]; +"266 bert/encoder/Reshape/shape_Unsqueeze__24" [id=266, type=Unsqueeze]; +"267 bert/encoder/Reshape/shape_Concat__26" [id=267, type=Concat]; +"268 bert/encoder/Reshape__27" [id=268, type=Cast]; +"269 bert/encoder/Reshape" [id=269, type=Reshape]; +"270 bert/encoder/Cast" [id=270, type=Cast]; +"271 bert/encoder/mul" [id=271, type=Mul]; +"272 bert/encoder/layer_9/attention/self/ExpandDims" [id=272, type=Reshape]; +"273 bert/encoder/layer_9/attention/self/sub" [id=273, type=Sub]; +"274 bert/encoder/layer_9/attention/self/mul_1" [id=274, type=Mul]; +"275 bert/encoder/layer_8/attention/self/ExpandDims" [id=275, type=Reshape]; +"276 bert/encoder/layer_8/attention/self/sub" [id=276, type=Sub]; +"277 bert/encoder/layer_8/attention/self/mul_1" [id=277, type=Mul]; +"278 bert/encoder/layer_7/attention/self/ExpandDims" [id=278, type=Reshape]; +"279 bert/encoder/layer_7/attention/self/sub" [id=279, type=Sub]; +"280 bert/encoder/layer_7/attention/self/mul_1" [id=280, type=Mul]; +"281 bert/encoder/layer_6/attention/self/ExpandDims" [id=281, type=Reshape]; +"282 bert/encoder/layer_6/attention/self/sub" [id=282, type=Sub]; +"283 bert/encoder/layer_6/attention/self/mul_1" [id=283, type=Mul]; +"284 bert/encoder/layer_5/attention/self/ExpandDims" [id=284, type=Reshape]; +"285 bert/encoder/layer_5/attention/self/sub" [id=285, type=Sub]; +"286 bert/encoder/layer_5/attention/self/mul_1" [id=286, type=Mul]; +"287 bert/encoder/layer_4/attention/self/ExpandDims" [id=287, type=Reshape]; 
+"288 bert/encoder/layer_4/attention/self/sub" [id=288, type=Sub]; +"289 bert/encoder/layer_4/attention/self/mul_1" [id=289, type=Mul]; +"290 bert/encoder/layer_3/attention/self/ExpandDims" [id=290, type=Reshape]; +"291 bert/encoder/layer_3/attention/self/sub" [id=291, type=Sub]; +"292 bert/encoder/layer_3/attention/self/mul_1" [id=292, type=Mul]; +"293 bert/encoder/layer_2/attention/self/ExpandDims" [id=293, type=Reshape]; +"294 bert/encoder/layer_2/attention/self/sub" [id=294, type=Sub]; +"295 bert/encoder/layer_2/attention/self/mul_1" [id=295, type=Mul]; +"296 bert/encoder/layer_11/attention/self/ExpandDims" [id=296, type=Reshape]; +"297 bert/encoder/layer_11/attention/self/sub" [id=297, type=Sub]; +"298 bert/encoder/layer_11/attention/self/mul_1" [id=298, type=Mul]; +"299 bert/encoder/layer_10/attention/self/ExpandDims" [id=299, type=Reshape]; +"300 bert/encoder/layer_10/attention/self/sub" [id=300, type=Sub]; +"301 bert/encoder/layer_10/attention/self/mul_1" [id=301, type=Mul]; +"302 bert/encoder/layer_1/attention/self/ExpandDims" [id=302, type=Reshape]; +"303 bert/encoder/layer_1/attention/self/sub" [id=303, type=Sub]; +"304 bert/encoder/layer_1/attention/self/mul_1" [id=304, type=Mul]; +"305 bert/encoder/layer_0/attention/self/ExpandDims" [id=305, type=Reshape]; +"306 bert/encoder/layer_0/attention/self/sub" [id=306, type=Sub]; +"307 bert/encoder/layer_0/attention/self/mul_1" [id=307, type=Mul]; +"308 bert/embeddings/Slice" [id=308, type=Slice]; +"309 bert/embeddings/Reshape_4__42" [id=309, type=Cast]; +"310 bert/embeddings/Reshape_4" [id=310, type=Reshape]; +"311 Constant_nncf_311" [id=311, type=Constant]; +"312 bert/embeddings/Reshape_3/shape_Unsqueeze__69" [id=312, type=Unsqueeze]; +"313 Constant_nncf_313" [id=313, type=Constant]; +"314 bert/embeddings/Reshape_3/shape_Unsqueeze__68" [id=314, type=Unsqueeze]; +"315 bert/embeddings/Reshape_2__43" [id=315, type=Cast]; +"316 bert/embeddings/Reshape_2" [id=316, type=Reshape]; +"317 Constant_nncf_317" [id=317, type=Constant]; +"318 bert/embeddings/Reshape_1/shape_Unsqueeze__57" [id=318, type=Unsqueeze]; +"319 Constant_nncf_319" [id=319, type=Constant]; +"320 bert/embeddings/Reshape_1/shape_Unsqueeze__56" [id=320, type=Unsqueeze]; +"321 bert/embeddings/Reshape__59" [id=321, type=Cast]; +"322 bert/embeddings/ExpandDims" [id=322, type=Reshape]; +"323 bert/embeddings/Shape" [id=323, type=Shape]; +"324 bert/embeddings/Shape__49" [id=324, type=Cast]; +"325 bert/embeddings/strided_slice" [id=325, type=Slice]; +"326 Constant_nncf_326" [id=326, type=Constant]; +"327 bert/embeddings/strided_slice__53" [id=327, type=Squeeze]; +"328 bert/embeddings/strided_slice__54" [id=328, type=Cast]; +"329 Constant_nncf_329" [id=329, type=Constant]; +"330 bert/embeddings/Reshape_1/shape_Unsqueeze__55" [id=330, type=Unsqueeze]; +"331 bert/embeddings/Reshape_1/shape_Concat__58" [id=331, type=Concat]; +"332 bert/embeddings/Reshape_1__60" [id=332, type=Cast]; +"333 bert/embeddings/Reshape" [id=333, type=Reshape]; +"334 QuantizeLinear_bert/embeddings/word_embeddings^0_1" [id=334, label="334 QuantizeLinear_bert/embeddings/word_embeddings:0_1", type=QuantizeLinear]; +"335 DequantizeLinear_bert/embeddings/word_embeddings^0_1" [id=335, label="335 DequantizeLinear_bert/embeddings/word_embeddings:0_1", type=DequantizeLinear]; +"336 bert/embeddings/GatherV2" [id=336, type=Gather]; +"337 bert/embeddings/Reshape_1" [id=337, type=Reshape]; +"338 bert/embeddings/Shape_1" [id=338, type=Shape]; +"339 bert/embeddings/Shape_1__61" [id=339, type=Cast]; +"340 
bert/embeddings/strided_slice_1" [id=340, type=Slice]; +"341 Constant_nncf_339" [id=341, type=Constant]; +"342 bert/embeddings/strided_slice_1__65" [id=342, type=Squeeze]; +"343 bert/embeddings/strided_slice_1__66" [id=343, type=Cast]; +"344 Constant_nncf_342" [id=344, type=Constant]; +"345 bert/embeddings/Reshape_3/shape_Unsqueeze__67" [id=345, type=Unsqueeze]; +"346 bert/embeddings/Reshape_3/shape_Concat__70" [id=346, type=Concat]; +"347 bert/embeddings/Reshape_3__71" [id=347, type=Cast]; +"348 Constant_nncf_346" [id=348, type=Constant]; +"349 Unsqueeze__46" [id=349, type=Unsqueeze]; +"350 Constant_nncf_348" [id=350, type=Constant]; +"351 Unsqueeze__45" [id=351, type=Unsqueeze]; +"352 Constant_nncf_350" [id=352, type=Constant]; +"353 Unsqueeze__44" [id=353, type=Unsqueeze]; +"354 Constant_nncf_352" [id=354, type=Constant]; +"355 Reshape_1/shape_Unsqueeze__480" [id=355, type=Unsqueeze]; +"356 Constant_nncf_354" [id=356, type=Constant]; +"357 Reshape_1/shape_Unsqueeze__479" [id=357, type=Unsqueeze]; +"358 Constant_nncf_356" [id=358, type=Constant]; +"359 Reshape/shape_Unsqueeze__483" [id=359, type=Unsqueeze]; +"360 MatMul__486" [id=360, type=Transpose]; +"361 Concat__47" [id=361, type=Concat]; +"362 bert/embeddings/one_hot" [id=362, type=OneHot]; +"363 QuantizeLinear_bert/embeddings/one_hot^0_1" [id=363, label="363 QuantizeLinear_bert/embeddings/one_hot:0_1", type=QuantizeLinear]; +"364 DequantizeLinear_bert/embeddings/one_hot^0_1" [id=364, label="364 DequantizeLinear_bert/embeddings/one_hot:0_1", type=DequantizeLinear]; +"365 QuantizeLinear_bert/embeddings/token_type_embeddings^0_1" [id=365, label="365 QuantizeLinear_bert/embeddings/token_type_embeddings:0_1", type=QuantizeLinear]; +"366 DequantizeLinear_bert/embeddings/token_type_embeddings^0_1" [id=366, label="366 DequantizeLinear_bert/embeddings/token_type_embeddings:0_1", type=DequantizeLinear]; +"367 bert/embeddings/MatMul" [id=367, type=MatMul]; +"368 bert/embeddings/Reshape_3" [id=368, type=Reshape]; +"369 bert/embeddings/add" [id=369, type=Add]; +"370 bert/embeddings/add_1" [id=370, type=Add]; +"371 bert/embeddings/LayerNorm/moments/mean" [id=371, type=ReduceMean]; +"372 bert/embeddings/LayerNorm/moments/StopGradient" [id=372, type=Identity]; +"373 bert/embeddings/LayerNorm/moments/SquaredDifference" [id=373, type=Sub]; +"374 bert/embeddings/LayerNorm/moments/SquaredDifference__72" [id=374, type=Mul]; +"375 bert/embeddings/LayerNorm/moments/variance" [id=375, type=ReduceMean]; +"376 bert/embeddings/LayerNorm/batchnorm/add" [id=376, type=Add]; +"377 bert/embeddings/LayerNorm/batchnorm/Rsqrt" [id=377, type=Sqrt]; +"378 bert/embeddings/LayerNorm/batchnorm/Rsqrt__74" [id=378, type=Reciprocal]; +"379 bert/embeddings/LayerNorm/batchnorm/mul" [id=379, type=Mul]; +"380 bert/embeddings/LayerNorm/batchnorm/mul_2" [id=380, type=Mul]; +"381 bert/embeddings/LayerNorm/batchnorm/sub" [id=381, type=Sub]; +"382 bert/embeddings/LayerNorm/batchnorm/mul_1" [id=382, type=Mul]; +"383 bert/embeddings/LayerNorm/batchnorm/add_1" [id=383, type=Add]; +"384 bert/encoder/Shape_2" [id=384, type=Shape]; +"385 bert/encoder/Shape_2__76" [id=385, type=Cast]; +"386 bert/encoder/strided_slice_2" [id=386, type=Slice]; +"387 Constant_nncf_381" [id=387, type=Constant]; +"388 bert/encoder/strided_slice_2__80" [id=388, type=Squeeze]; +"389 bert/encoder/strided_slice_2__81" [id=389, type=Cast]; +"390 bert/encoder/layer_9/attention/self/mul_2" [id=390, type=Mul]; +"391 Constant_nncf_385" [id=391, type=Constant]; +"392 
bert/encoder/layer_9/attention/self/Reshape_3/shape_Unsqueeze__82" [id=392, type=Unsqueeze]; +"393 bert/encoder/layer_9/attention/self/Reshape_3/shape_Concat__84" [id=393, type=Concat]; +"394 bert/encoder/layer_9/attention/self/Reshape_3__434" [id=394, type=Cast]; +"395 Constant_nncf_389" [id=395, type=Constant]; +"396 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__85" [id=396, type=Unsqueeze]; +"397 bert/encoder/layer_9/attention/self/Reshape_2/shape_Concat__89" [id=397, type=Concat]; +"398 bert/encoder/layer_9/attention/self/Reshape_2__429" [id=398, type=Cast]; +"399 Constant_nncf_393" [id=399, type=Constant]; +"400 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__90" [id=400, type=Unsqueeze]; +"401 bert/encoder/layer_9/attention/self/Reshape_1/shape_Concat__94" [id=401, type=Concat]; +"402 bert/encoder/layer_9/attention/self/Reshape_1__431" [id=402, type=Cast]; +"403 Constant_nncf_397" [id=403, type=Constant]; +"404 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__95" [id=404, type=Unsqueeze]; +"405 bert/encoder/layer_9/attention/self/Reshape/shape_Concat__99" [id=405, type=Concat]; +"406 bert/encoder/layer_9/attention/self/Reshape__430" [id=406, type=Cast]; +"407 bert/encoder/layer_8/attention/self/mul_2" [id=407, type=Mul]; +"408 Constant_nncf_402" [id=408, type=Constant]; +"409 bert/encoder/layer_8/attention/self/Reshape_3/shape_Unsqueeze__100" [id=409, type=Unsqueeze]; +"410 bert/encoder/layer_8/attention/self/Reshape_3/shape_Concat__102" [id=410, type=Concat]; +"411 bert/encoder/layer_8/attention/self/Reshape_3__420" [id=411, type=Cast]; +"412 Constant_nncf_406" [id=412, type=Constant]; +"413 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__103" [id=413, type=Unsqueeze]; +"414 bert/encoder/layer_8/attention/self/Reshape_2/shape_Concat__107" [id=414, type=Concat]; +"415 bert/encoder/layer_8/attention/self/Reshape_2__415" [id=415, type=Cast]; +"416 Constant_nncf_410" [id=416, type=Constant]; +"417 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__108" [id=417, type=Unsqueeze]; +"418 bert/encoder/layer_8/attention/self/Reshape_1/shape_Concat__112" [id=418, type=Concat]; +"419 bert/encoder/layer_8/attention/self/Reshape_1__417" [id=419, type=Cast]; +"420 Constant_nncf_414" [id=420, type=Constant]; +"421 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__113" [id=421, type=Unsqueeze]; +"422 bert/encoder/layer_8/attention/self/Reshape/shape_Concat__117" [id=422, type=Concat]; +"423 bert/encoder/layer_8/attention/self/Reshape__416" [id=423, type=Cast]; +"424 bert/encoder/layer_7/attention/self/mul_2" [id=424, type=Mul]; +"425 Constant_nncf_419" [id=425, type=Constant]; +"426 bert/encoder/layer_7/attention/self/Reshape_3/shape_Unsqueeze__118" [id=426, type=Unsqueeze]; +"427 bert/encoder/layer_7/attention/self/Reshape_3/shape_Concat__120" [id=427, type=Concat]; +"428 bert/encoder/layer_7/attention/self/Reshape_3__406" [id=428, type=Cast]; +"429 Constant_nncf_423" [id=429, type=Constant]; +"430 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__121" [id=430, type=Unsqueeze]; +"431 bert/encoder/layer_7/attention/self/Reshape_2/shape_Concat__125" [id=431, type=Concat]; +"432 bert/encoder/layer_7/attention/self/Reshape_2__401" [id=432, type=Cast]; +"433 Constant_nncf_427" [id=433, type=Constant]; +"434 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__126" [id=434, type=Unsqueeze]; +"435 bert/encoder/layer_7/attention/self/Reshape_1/shape_Concat__130" [id=435, type=Concat]; +"436 
bert/encoder/layer_7/attention/self/Reshape_1__403" [id=436, type=Cast]; +"437 Constant_nncf_431" [id=437, type=Constant]; +"438 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__131" [id=438, type=Unsqueeze]; +"439 bert/encoder/layer_7/attention/self/Reshape/shape_Concat__135" [id=439, type=Concat]; +"440 bert/encoder/layer_7/attention/self/Reshape__402" [id=440, type=Cast]; +"441 bert/encoder/layer_6/attention/self/mul_2" [id=441, type=Mul]; +"442 Constant_nncf_436" [id=442, type=Constant]; +"443 bert/encoder/layer_6/attention/self/Reshape_3/shape_Unsqueeze__136" [id=443, type=Unsqueeze]; +"444 bert/encoder/layer_6/attention/self/Reshape_3/shape_Concat__138" [id=444, type=Concat]; +"445 bert/encoder/layer_6/attention/self/Reshape_3__392" [id=445, type=Cast]; +"446 Constant_nncf_440" [id=446, type=Constant]; +"447 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__139" [id=447, type=Unsqueeze]; +"448 bert/encoder/layer_6/attention/self/Reshape_2/shape_Concat__143" [id=448, type=Concat]; +"449 bert/encoder/layer_6/attention/self/Reshape_2__387" [id=449, type=Cast]; +"450 Constant_nncf_444" [id=450, type=Constant]; +"451 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__144" [id=451, type=Unsqueeze]; +"452 bert/encoder/layer_6/attention/self/Reshape_1/shape_Concat__148" [id=452, type=Concat]; +"453 bert/encoder/layer_6/attention/self/Reshape_1__389" [id=453, type=Cast]; +"454 Constant_nncf_448" [id=454, type=Constant]; +"455 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__149" [id=455, type=Unsqueeze]; +"456 bert/encoder/layer_6/attention/self/Reshape/shape_Concat__153" [id=456, type=Concat]; +"457 bert/encoder/layer_6/attention/self/Reshape__388" [id=457, type=Cast]; +"458 bert/encoder/layer_5/attention/self/mul_2" [id=458, type=Mul]; +"459 Constant_nncf_453" [id=459, type=Constant]; +"460 bert/encoder/layer_5/attention/self/Reshape_3/shape_Unsqueeze__154" [id=460, type=Unsqueeze]; +"461 bert/encoder/layer_5/attention/self/Reshape_3/shape_Concat__156" [id=461, type=Concat]; +"462 bert/encoder/layer_5/attention/self/Reshape_3__378" [id=462, type=Cast]; +"463 Constant_nncf_457" [id=463, type=Constant]; +"464 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__157" [id=464, type=Unsqueeze]; +"465 bert/encoder/layer_5/attention/self/Reshape_2/shape_Concat__161" [id=465, type=Concat]; +"466 bert/encoder/layer_5/attention/self/Reshape_2__373" [id=466, type=Cast]; +"467 Constant_nncf_461" [id=467, type=Constant]; +"468 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__162" [id=468, type=Unsqueeze]; +"469 bert/encoder/layer_5/attention/self/Reshape_1/shape_Concat__166" [id=469, type=Concat]; +"470 bert/encoder/layer_5/attention/self/Reshape_1__375" [id=470, type=Cast]; +"471 Constant_nncf_465" [id=471, type=Constant]; +"472 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__167" [id=472, type=Unsqueeze]; +"473 bert/encoder/layer_5/attention/self/Reshape/shape_Concat__171" [id=473, type=Concat]; +"474 bert/encoder/layer_5/attention/self/Reshape__374" [id=474, type=Cast]; +"475 bert/encoder/layer_4/attention/self/mul_2" [id=475, type=Mul]; +"476 Constant_nncf_470" [id=476, type=Constant]; +"477 bert/encoder/layer_4/attention/self/Reshape_3/shape_Unsqueeze__172" [id=477, type=Unsqueeze]; +"478 bert/encoder/layer_4/attention/self/Reshape_3/shape_Concat__174" [id=478, type=Concat]; +"479 bert/encoder/layer_4/attention/self/Reshape_3__364" [id=479, type=Cast]; +"480 Constant_nncf_474" [id=480, type=Constant]; +"481 
bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__175" [id=481, type=Unsqueeze]; +"482 bert/encoder/layer_4/attention/self/Reshape_2/shape_Concat__179" [id=482, type=Concat]; +"483 bert/encoder/layer_4/attention/self/Reshape_2__359" [id=483, type=Cast]; +"484 Constant_nncf_478" [id=484, type=Constant]; +"485 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__180" [id=485, type=Unsqueeze]; +"486 bert/encoder/layer_4/attention/self/Reshape_1/shape_Concat__184" [id=486, type=Concat]; +"487 bert/encoder/layer_4/attention/self/Reshape_1__361" [id=487, type=Cast]; +"488 Constant_nncf_482" [id=488, type=Constant]; +"489 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__185" [id=489, type=Unsqueeze]; +"490 bert/encoder/layer_4/attention/self/Reshape/shape_Concat__189" [id=490, type=Concat]; +"491 bert/encoder/layer_4/attention/self/Reshape__360" [id=491, type=Cast]; +"492 bert/encoder/layer_3/attention/self/mul_2" [id=492, type=Mul]; +"493 Constant_nncf_487" [id=493, type=Constant]; +"494 bert/encoder/layer_3/attention/self/Reshape_3/shape_Unsqueeze__190" [id=494, type=Unsqueeze]; +"495 bert/encoder/layer_3/attention/self/Reshape_3/shape_Concat__192" [id=495, type=Concat]; +"496 bert/encoder/layer_3/attention/self/Reshape_3__350" [id=496, type=Cast]; +"497 Constant_nncf_491" [id=497, type=Constant]; +"498 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__193" [id=498, type=Unsqueeze]; +"499 bert/encoder/layer_3/attention/self/Reshape_2/shape_Concat__197" [id=499, type=Concat]; +"500 bert/encoder/layer_3/attention/self/Reshape_2__345" [id=500, type=Cast]; +"501 Constant_nncf_495" [id=501, type=Constant]; +"502 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__198" [id=502, type=Unsqueeze]; +"503 bert/encoder/layer_3/attention/self/Reshape_1/shape_Concat__202" [id=503, type=Concat]; +"504 bert/encoder/layer_3/attention/self/Reshape_1__347" [id=504, type=Cast]; +"505 Constant_nncf_499" [id=505, type=Constant]; +"506 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__203" [id=506, type=Unsqueeze]; +"507 bert/encoder/layer_3/attention/self/Reshape/shape_Concat__207" [id=507, type=Concat]; +"508 bert/encoder/layer_3/attention/self/Reshape__346" [id=508, type=Cast]; +"509 bert/encoder/layer_2/attention/self/mul_2" [id=509, type=Mul]; +"510 Constant_nncf_504" [id=510, type=Constant]; +"511 bert/encoder/layer_2/attention/self/Reshape_3/shape_Unsqueeze__208" [id=511, type=Unsqueeze]; +"512 bert/encoder/layer_2/attention/self/Reshape_3/shape_Concat__210" [id=512, type=Concat]; +"513 bert/encoder/layer_2/attention/self/Reshape_3__336" [id=513, type=Cast]; +"514 Constant_nncf_508" [id=514, type=Constant]; +"515 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__211" [id=515, type=Unsqueeze]; +"516 bert/encoder/layer_2/attention/self/Reshape_2/shape_Concat__215" [id=516, type=Concat]; +"517 bert/encoder/layer_2/attention/self/Reshape_2__331" [id=517, type=Cast]; +"518 Constant_nncf_512" [id=518, type=Constant]; +"519 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__216" [id=519, type=Unsqueeze]; +"520 bert/encoder/layer_2/attention/self/Reshape_1/shape_Concat__220" [id=520, type=Concat]; +"521 bert/encoder/layer_2/attention/self/Reshape_1__333" [id=521, type=Cast]; +"522 Constant_nncf_516" [id=522, type=Constant]; +"523 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__221" [id=523, type=Unsqueeze]; +"524 bert/encoder/layer_2/attention/self/Reshape/shape_Concat__225" [id=524, type=Concat]; +"525 
bert/encoder/layer_2/attention/self/Reshape__332" [id=525, type=Cast]; +"526 bert/encoder/layer_11/attention/self/mul_2" [id=526, type=Mul]; +"527 Constant_nncf_521" [id=527, type=Constant]; +"528 bert/encoder/layer_11/attention/self/Reshape_3/shape_Unsqueeze__226" [id=528, type=Unsqueeze]; +"529 bert/encoder/layer_11/attention/self/Reshape_3/shape_Concat__228" [id=529, type=Concat]; +"530 bert/encoder/layer_11/attention/self/Reshape_3__462" [id=530, type=Cast]; +"531 Constant_nncf_525" [id=531, type=Constant]; +"532 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__229" [id=532, type=Unsqueeze]; +"533 bert/encoder/layer_11/attention/self/Reshape_2/shape_Concat__233" [id=533, type=Concat]; +"534 bert/encoder/layer_11/attention/self/Reshape_2__457" [id=534, type=Cast]; +"535 Constant_nncf_529" [id=535, type=Constant]; +"536 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__234" [id=536, type=Unsqueeze]; +"537 bert/encoder/layer_11/attention/self/Reshape_1/shape_Concat__238" [id=537, type=Concat]; +"538 bert/encoder/layer_11/attention/self/Reshape_1__459" [id=538, type=Cast]; +"539 Constant_nncf_533" [id=539, type=Constant]; +"540 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__239" [id=540, type=Unsqueeze]; +"541 bert/encoder/layer_11/attention/self/Reshape/shape_Concat__243" [id=541, type=Concat]; +"542 bert/encoder/layer_11/attention/self/Reshape__458" [id=542, type=Cast]; +"543 bert/encoder/layer_10/attention/self/mul_2" [id=543, type=Mul]; +"544 Constant_nncf_538" [id=544, type=Constant]; +"545 bert/encoder/layer_10/attention/self/Reshape_3/shape_Unsqueeze__244" [id=545, type=Unsqueeze]; +"546 bert/encoder/layer_10/attention/self/Reshape_3/shape_Concat__246" [id=546, type=Concat]; +"547 bert/encoder/layer_10/attention/self/Reshape_3__448" [id=547, type=Cast]; +"548 Constant_nncf_542" [id=548, type=Constant]; +"549 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__247" [id=549, type=Unsqueeze]; +"550 bert/encoder/layer_10/attention/self/Reshape_2/shape_Concat__251" [id=550, type=Concat]; +"551 bert/encoder/layer_10/attention/self/Reshape_2__443" [id=551, type=Cast]; +"552 Constant_nncf_546" [id=552, type=Constant]; +"553 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__252" [id=553, type=Unsqueeze]; +"554 bert/encoder/layer_10/attention/self/Reshape_1/shape_Concat__256" [id=554, type=Concat]; +"555 bert/encoder/layer_10/attention/self/Reshape_1__445" [id=555, type=Cast]; +"556 Constant_nncf_550" [id=556, type=Constant]; +"557 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__257" [id=557, type=Unsqueeze]; +"558 bert/encoder/layer_10/attention/self/Reshape/shape_Concat__261" [id=558, type=Concat]; +"559 bert/encoder/layer_10/attention/self/Reshape__444" [id=559, type=Cast]; +"560 bert/encoder/layer_1/attention/self/mul_2" [id=560, type=Mul]; +"561 Constant_nncf_555" [id=561, type=Constant]; +"562 bert/encoder/layer_1/attention/self/Reshape_3/shape_Unsqueeze__262" [id=562, type=Unsqueeze]; +"563 bert/encoder/layer_1/attention/self/Reshape_3/shape_Concat__264" [id=563, type=Concat]; +"564 bert/encoder/layer_1/attention/self/Reshape_3__322" [id=564, type=Cast]; +"565 Constant_nncf_559" [id=565, type=Constant]; +"566 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__265" [id=566, type=Unsqueeze]; +"567 bert/encoder/layer_1/attention/self/Reshape_2/shape_Concat__269" [id=567, type=Concat]; +"568 bert/encoder/layer_1/attention/self/Reshape_2__317" [id=568, type=Cast]; +"569 Constant_nncf_563" 
[id=569, type=Constant]; +"570 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__270" [id=570, type=Unsqueeze]; +"571 bert/encoder/layer_1/attention/self/Reshape_1/shape_Concat__274" [id=571, type=Concat]; +"572 bert/encoder/layer_1/attention/self/Reshape_1__319" [id=572, type=Cast]; +"573 Constant_nncf_567" [id=573, type=Constant]; +"574 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__275" [id=574, type=Unsqueeze]; +"575 bert/encoder/layer_1/attention/self/Reshape/shape_Concat__279" [id=575, type=Concat]; +"576 bert/encoder/layer_1/attention/self/Reshape__318" [id=576, type=Cast]; +"577 bert/encoder/layer_0/attention/self/mul_2" [id=577, type=Mul]; +"578 Constant_nncf_572" [id=578, type=Constant]; +"579 bert/encoder/layer_0/attention/self/Reshape_3/shape_Unsqueeze__280" [id=579, type=Unsqueeze]; +"580 bert/encoder/layer_0/attention/self/Reshape_3/shape_Concat__282" [id=580, type=Concat]; +"581 bert/encoder/layer_0/attention/self/Reshape_3__308" [id=581, type=Cast]; +"582 Constant_nncf_576" [id=582, type=Constant]; +"583 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__283" [id=583, type=Unsqueeze]; +"584 bert/encoder/layer_0/attention/self/Reshape_2/shape_Concat__287" [id=584, type=Concat]; +"585 bert/encoder/layer_0/attention/self/Reshape_2__303" [id=585, type=Cast]; +"586 Constant_nncf_580" [id=586, type=Constant]; +"587 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__288" [id=587, type=Unsqueeze]; +"588 bert/encoder/layer_0/attention/self/Reshape_1/shape_Concat__292" [id=588, type=Concat]; +"589 bert/encoder/layer_0/attention/self/Reshape_1__305" [id=589, type=Cast]; +"590 Constant_nncf_584" [id=590, type=Constant]; +"591 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__293" [id=591, type=Unsqueeze]; +"592 bert/encoder/layer_0/attention/self/Reshape/shape_Concat__297" [id=592, type=Concat]; +"593 bert/encoder/layer_0/attention/self/Reshape__304" [id=593, type=Cast]; +"594 Constant_nncf_588" [id=594, type=Constant]; +"595 bert/encoder/Reshape_13/shape_Unsqueeze__298" [id=595, type=Unsqueeze]; +"596 bert/encoder/Reshape_13/shape_Concat__301" [id=596, type=Concat]; +"597 bert/encoder/Reshape_13__471" [id=597, type=Cast]; +"598 bert/encoder/Reshape_1" [id=598, type=Reshape]; +"599 QuantizeLinear_bert/encoder/Reshape_1^0_1" [id=599, label="599 QuantizeLinear_bert/encoder/Reshape_1:0_1", type=QuantizeLinear]; +"600 DequantizeLinear_bert/encoder/Reshape_1^0_1" [id=600, label="600 DequantizeLinear_bert/encoder/Reshape_1:0_1", type=DequantizeLinear]; +"601 QuantizeLinear_bert/encoder/layer_0/attention/self/value/kernel^0_1" [id=601, label="601 QuantizeLinear_bert/encoder/layer_0/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"602 DequantizeLinear_bert/encoder/layer_0/attention/self/value/kernel^0_1" [id=602, label="602 DequantizeLinear_bert/encoder/layer_0/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"603 QuantizeLinear_bert/encoder/Reshape_1^0_2" [id=603, label="603 QuantizeLinear_bert/encoder/Reshape_1:0_2", type=QuantizeLinear]; +"604 DequantizeLinear_bert/encoder/Reshape_1^0_2" [id=604, label="604 DequantizeLinear_bert/encoder/Reshape_1:0_2", type=DequantizeLinear]; +"605 QuantizeLinear_bert/encoder/Reshape_1^0_3" [id=605, label="605 QuantizeLinear_bert/encoder/Reshape_1:0_3", type=QuantizeLinear]; +"606 DequantizeLinear_bert/encoder/Reshape_1^0_3" [id=606, label="606 DequantizeLinear_bert/encoder/Reshape_1:0_3", type=DequantizeLinear]; +"607 bert/encoder/layer_0/attention/self/value/MatMul" [id=607, 
type=MatMul]; +"608 bert/encoder/layer_0/attention/self/value/BiasAdd" [id=608, type=Add]; +"609 bert/encoder/layer_0/attention/self/Reshape_2" [id=609, type=Reshape]; +"610 bert/encoder/layer_0/attention/self/transpose_2" [id=610, type=Transpose]; +"611 QuantizeLinear_bert/encoder/layer_0/attention/self/query/kernel^0_1" [id=611, label="611 QuantizeLinear_bert/encoder/layer_0/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"612 DequantizeLinear_bert/encoder/layer_0/attention/self/query/kernel^0_1" [id=612, label="612 DequantizeLinear_bert/encoder/layer_0/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"613 bert/encoder/layer_0/attention/self/query/MatMul" [id=613, type=MatMul]; +"614 bert/encoder/layer_0/attention/self/query/BiasAdd" [id=614, type=Add]; +"615 QuantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd^0_1" [id=615, label="615 QuantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"616 DequantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd^0_1" [id=616, label="616 DequantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"617 bert/encoder/layer_0/attention/self/Reshape" [id=617, type=Reshape]; +"618 bert/encoder/layer_0/attention/self/transpose" [id=618, type=Transpose]; +"619 QuantizeLinear_bert/encoder/layer_0/attention/self/key/kernel^0_1" [id=619, label="619 QuantizeLinear_bert/encoder/layer_0/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"620 DequantizeLinear_bert/encoder/layer_0/attention/self/key/kernel^0_1" [id=620, label="620 DequantizeLinear_bert/encoder/layer_0/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"621 bert/encoder/layer_0/attention/self/key/MatMul" [id=621, type=MatMul]; +"622 bert/encoder/layer_0/attention/self/key/BiasAdd" [id=622, type=Add]; +"623 QuantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd^0_1" [id=623, label="623 QuantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"624 DequantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd^0_1" [id=624, label="624 DequantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"625 bert/encoder/layer_0/attention/self/Reshape_1" [id=625, type=Reshape]; +"626 bert/encoder/layer_0/attention/self/transpose_1" [id=626, type=Transpose]; +"627 bert/encoder/layer_0/attention/self/MatMul__306" [id=627, type=Transpose]; +"628 bert/encoder/layer_0/attention/self/MatMul" [id=628, type=MatMul]; +"629 bert/encoder/layer_0/attention/self/Mul" [id=629, type=Mul]; +"630 bert/encoder/layer_0/attention/self/add" [id=630, type=Add]; +"631 Shape_nncf_609" [id=631, type=Shape]; +"632 Flatten_nncf_610" [id=632, type=Flatten]; +"633 bert/encoder/layer_0/attention/self/Softmax" [id=633, type=Softmax]; +"634 Reshape_nncf_612" [id=634, type=Reshape]; +"635 bert/encoder/layer_0/attention/self/MatMul_1" [id=635, type=MatMul]; +"636 QuantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1^0_1" [id=636, label="636 QuantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"637 DequantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1^0_1" [id=637, label="637 DequantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"638 bert/encoder/layer_0/attention/self/transpose_3" [id=638, type=Transpose]; +"639 bert/encoder/layer_0/attention/self/Reshape_3" [id=639, type=Reshape]; +"640 
QuantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel^0_1" [id=640, label="640 QuantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"641 DequantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel^0_1" [id=641, label="641 DequantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"642 bert/encoder/layer_0/attention/output/dense/MatMul" [id=642, type=MatMul]; +"643 bert/encoder/layer_0/attention/output/dense/BiasAdd" [id=643, type=Add]; +"644 bert/encoder/layer_0/attention/output/add" [id=644, type=Add]; +"645 bert/encoder/layer_0/attention/output/LayerNorm/moments/mean" [id=645, type=ReduceMean]; +"646 bert/encoder/layer_0/attention/output/LayerNorm/moments/StopGradient" [id=646, type=Identity]; +"647 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference" [id=647, type=Sub]; +"648 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference__309" [id=648, type=Mul]; +"649 bert/encoder/layer_0/attention/output/LayerNorm/moments/variance" [id=649, type=ReduceMean]; +"650 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add" [id=650, type=Add]; +"651 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/Rsqrt" [id=651, type=Sqrt]; +"652 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/Rsqrt__311" [id=652, type=Reciprocal]; +"653 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul" [id=653, type=Mul]; +"654 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_2" [id=654, type=Mul]; +"655 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/sub" [id=655, type=Sub]; +"656 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_1" [id=656, type=Mul]; +"657 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1" [id=657, type=Add]; +"658 QuantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=658, label="658 QuantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"659 DequantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=659, label="659 DequantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"660 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel^0_1" [id=660, label="660 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"661 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel^0_1" [id=661, label="661 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"662 bert/encoder/layer_0/intermediate/dense/MatMul" [id=662, type=MatMul]; +"663 bert/encoder/layer_0/intermediate/dense/BiasAdd" [id=663, type=Add]; +"664 bert/encoder/layer_0/intermediate/dense/Pow" [id=664, type=Pow]; +"665 bert/encoder/layer_0/intermediate/dense/mul" [id=665, type=Mul]; +"666 bert/encoder/layer_0/intermediate/dense/add" [id=666, type=Add]; +"667 bert/encoder/layer_0/intermediate/dense/mul_1" [id=667, type=Mul]; +"668 bert/encoder/layer_0/intermediate/dense/Tanh" [id=668, type=Tanh]; +"669 bert/encoder/layer_0/intermediate/dense/add_1" [id=669, type=Add]; +"670 bert/encoder/layer_0/intermediate/dense/mul_2" [id=670, type=Mul]; +"671 bert/encoder/layer_0/intermediate/dense/mul_3" [id=671, type=Mul]; +"672 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3^0_1" [id=672, label="672 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3:0_1", 
type=QuantizeLinear]; +"673 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3^0_1" [id=673, label="673 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"674 QuantizeLinear_bert/encoder/layer_0/output/dense/kernel^0_1" [id=674, label="674 QuantizeLinear_bert/encoder/layer_0/output/dense/kernel:0_1", type=QuantizeLinear]; +"675 DequantizeLinear_bert/encoder/layer_0/output/dense/kernel^0_1" [id=675, label="675 DequantizeLinear_bert/encoder/layer_0/output/dense/kernel:0_1", type=DequantizeLinear]; +"676 bert/encoder/layer_0/output/dense/MatMul" [id=676, type=MatMul]; +"677 bert/encoder/layer_0/output/dense/BiasAdd" [id=677, type=Add]; +"678 bert/encoder/layer_0/output/add" [id=678, type=Add]; +"679 bert/encoder/layer_0/output/LayerNorm/moments/mean" [id=679, type=ReduceMean]; +"680 bert/encoder/layer_0/output/LayerNorm/moments/StopGradient" [id=680, type=Identity]; +"681 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference" [id=681, type=Sub]; +"682 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference__313" [id=682, type=Mul]; +"683 bert/encoder/layer_0/output/LayerNorm/moments/variance" [id=683, type=ReduceMean]; +"684 bert/encoder/layer_0/output/LayerNorm/batchnorm/add" [id=684, type=Add]; +"685 bert/encoder/layer_0/output/LayerNorm/batchnorm/Rsqrt" [id=685, type=Sqrt]; +"686 bert/encoder/layer_0/output/LayerNorm/batchnorm/Rsqrt__315" [id=686, type=Reciprocal]; +"687 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul" [id=687, type=Mul]; +"688 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_2" [id=688, type=Mul]; +"689 bert/encoder/layer_0/output/LayerNorm/batchnorm/sub" [id=689, type=Sub]; +"690 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_1" [id=690, type=Mul]; +"691 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" [id=691, type=Add]; +"692 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_1" [id=692, label="692 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"693 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_1" [id=693, label="693 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"694 QuantizeLinear_bert/encoder/layer_1/attention/self/value/kernel^0_1" [id=694, label="694 QuantizeLinear_bert/encoder/layer_1/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"695 DequantizeLinear_bert/encoder/layer_1/attention/self/value/kernel^0_1" [id=695, label="695 DequantizeLinear_bert/encoder/layer_1/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"696 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_2" [id=696, label="696 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"697 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_2" [id=697, label="697 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"698 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_3" [id=698, label="698 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"699 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_3" [id=699, label="699 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"700 bert/encoder/layer_1/attention/self/value/MatMul" [id=700, type=MatMul]; +"701 
bert/encoder/layer_1/attention/self/value/BiasAdd" [id=701, type=Add]; +"702 bert/encoder/layer_1/attention/self/Reshape_2" [id=702, type=Reshape]; +"703 bert/encoder/layer_1/attention/self/transpose_2" [id=703, type=Transpose]; +"704 QuantizeLinear_bert/encoder/layer_1/attention/self/query/kernel^0_1" [id=704, label="704 QuantizeLinear_bert/encoder/layer_1/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"705 DequantizeLinear_bert/encoder/layer_1/attention/self/query/kernel^0_1" [id=705, label="705 DequantizeLinear_bert/encoder/layer_1/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"706 bert/encoder/layer_1/attention/self/query/MatMul" [id=706, type=MatMul]; +"707 bert/encoder/layer_1/attention/self/query/BiasAdd" [id=707, type=Add]; +"708 QuantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd^0_1" [id=708, label="708 QuantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"709 DequantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd^0_1" [id=709, label="709 DequantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"710 bert/encoder/layer_1/attention/self/Reshape" [id=710, type=Reshape]; +"711 bert/encoder/layer_1/attention/self/transpose" [id=711, type=Transpose]; +"712 QuantizeLinear_bert/encoder/layer_1/attention/self/key/kernel^0_1" [id=712, label="712 QuantizeLinear_bert/encoder/layer_1/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"713 DequantizeLinear_bert/encoder/layer_1/attention/self/key/kernel^0_1" [id=713, label="713 DequantizeLinear_bert/encoder/layer_1/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"714 bert/encoder/layer_1/attention/self/key/MatMul" [id=714, type=MatMul]; +"715 bert/encoder/layer_1/attention/self/key/BiasAdd" [id=715, type=Add]; +"716 QuantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd^0_1" [id=716, label="716 QuantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"717 DequantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd^0_1" [id=717, label="717 DequantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"718 bert/encoder/layer_1/attention/self/Reshape_1" [id=718, type=Reshape]; +"719 bert/encoder/layer_1/attention/self/transpose_1" [id=719, type=Transpose]; +"720 bert/encoder/layer_1/attention/self/MatMul__320" [id=720, type=Transpose]; +"721 bert/encoder/layer_1/attention/self/MatMul" [id=721, type=MatMul]; +"722 bert/encoder/layer_1/attention/self/Mul" [id=722, type=Mul]; +"723 bert/encoder/layer_1/attention/self/add" [id=723, type=Add]; +"724 Shape_nncf_674" [id=724, type=Shape]; +"725 Flatten_nncf_675" [id=725, type=Flatten]; +"726 bert/encoder/layer_1/attention/self/Softmax" [id=726, type=Softmax]; +"727 Reshape_nncf_677" [id=727, type=Reshape]; +"728 bert/encoder/layer_1/attention/self/MatMul_1" [id=728, type=MatMul]; +"729 QuantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1^0_1" [id=729, label="729 QuantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"730 DequantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1^0_1" [id=730, label="730 DequantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"731 bert/encoder/layer_1/attention/self/transpose_3" [id=731, type=Transpose]; +"732 bert/encoder/layer_1/attention/self/Reshape_3" [id=732, type=Reshape]; +"733 QuantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel^0_1" [id=733, 
label="733 QuantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"734 DequantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel^0_1" [id=734, label="734 DequantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"735 bert/encoder/layer_1/attention/output/dense/MatMul" [id=735, type=MatMul]; +"736 bert/encoder/layer_1/attention/output/dense/BiasAdd" [id=736, type=Add]; +"737 bert/encoder/layer_1/attention/output/add" [id=737, type=Add]; +"738 bert/encoder/layer_1/attention/output/LayerNorm/moments/mean" [id=738, type=ReduceMean]; +"739 bert/encoder/layer_1/attention/output/LayerNorm/moments/StopGradient" [id=739, type=Identity]; +"740 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference" [id=740, type=Sub]; +"741 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference__323" [id=741, type=Mul]; +"742 bert/encoder/layer_1/attention/output/LayerNorm/moments/variance" [id=742, type=ReduceMean]; +"743 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add" [id=743, type=Add]; +"744 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/Rsqrt" [id=744, type=Sqrt]; +"745 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/Rsqrt__325" [id=745, type=Reciprocal]; +"746 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul" [id=746, type=Mul]; +"747 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_2" [id=747, type=Mul]; +"748 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/sub" [id=748, type=Sub]; +"749 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_1" [id=749, type=Mul]; +"750 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1" [id=750, type=Add]; +"751 QuantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=751, label="751 QuantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"752 DequantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=752, label="752 DequantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"753 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel^0_1" [id=753, label="753 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"754 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel^0_1" [id=754, label="754 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"755 bert/encoder/layer_1/intermediate/dense/MatMul" [id=755, type=MatMul]; +"756 bert/encoder/layer_1/intermediate/dense/BiasAdd" [id=756, type=Add]; +"757 bert/encoder/layer_1/intermediate/dense/Pow" [id=757, type=Pow]; +"758 bert/encoder/layer_1/intermediate/dense/mul" [id=758, type=Mul]; +"759 bert/encoder/layer_1/intermediate/dense/add" [id=759, type=Add]; +"760 bert/encoder/layer_1/intermediate/dense/mul_1" [id=760, type=Mul]; +"761 bert/encoder/layer_1/intermediate/dense/Tanh" [id=761, type=Tanh]; +"762 bert/encoder/layer_1/intermediate/dense/add_1" [id=762, type=Add]; +"763 bert/encoder/layer_1/intermediate/dense/mul_2" [id=763, type=Mul]; +"764 bert/encoder/layer_1/intermediate/dense/mul_3" [id=764, type=Mul]; +"765 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3^0_1" [id=765, label="765 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"766 
DequantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3^0_1" [id=766, label="766 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"767 QuantizeLinear_bert/encoder/layer_1/output/dense/kernel^0_1" [id=767, label="767 QuantizeLinear_bert/encoder/layer_1/output/dense/kernel:0_1", type=QuantizeLinear]; +"768 DequantizeLinear_bert/encoder/layer_1/output/dense/kernel^0_1" [id=768, label="768 DequantizeLinear_bert/encoder/layer_1/output/dense/kernel:0_1", type=DequantizeLinear]; +"769 bert/encoder/layer_1/output/dense/MatMul" [id=769, type=MatMul]; +"770 bert/encoder/layer_1/output/dense/BiasAdd" [id=770, type=Add]; +"771 bert/encoder/layer_1/output/add" [id=771, type=Add]; +"772 bert/encoder/layer_1/output/LayerNorm/moments/mean" [id=772, type=ReduceMean]; +"773 bert/encoder/layer_1/output/LayerNorm/moments/StopGradient" [id=773, type=Identity]; +"774 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference" [id=774, type=Sub]; +"775 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference__327" [id=775, type=Mul]; +"776 bert/encoder/layer_1/output/LayerNorm/moments/variance" [id=776, type=ReduceMean]; +"777 bert/encoder/layer_1/output/LayerNorm/batchnorm/add" [id=777, type=Add]; +"778 bert/encoder/layer_1/output/LayerNorm/batchnorm/Rsqrt" [id=778, type=Sqrt]; +"779 bert/encoder/layer_1/output/LayerNorm/batchnorm/Rsqrt__329" [id=779, type=Reciprocal]; +"780 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul" [id=780, type=Mul]; +"781 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_2" [id=781, type=Mul]; +"782 bert/encoder/layer_1/output/LayerNorm/batchnorm/sub" [id=782, type=Sub]; +"783 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_1" [id=783, type=Mul]; +"784 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" [id=784, type=Add]; +"785 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_1" [id=785, label="785 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"786 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_1" [id=786, label="786 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"787 QuantizeLinear_bert/encoder/layer_2/attention/self/value/kernel^0_1" [id=787, label="787 QuantizeLinear_bert/encoder/layer_2/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"788 DequantizeLinear_bert/encoder/layer_2/attention/self/value/kernel^0_1" [id=788, label="788 DequantizeLinear_bert/encoder/layer_2/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"789 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_2" [id=789, label="789 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"790 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_2" [id=790, label="790 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"791 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_3" [id=791, label="791 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"792 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_3" [id=792, label="792 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"793 bert/encoder/layer_2/attention/self/value/MatMul" [id=793, type=MatMul]; +"794 bert/encoder/layer_2/attention/self/value/BiasAdd" 
[id=794, type=Add]; +"795 bert/encoder/layer_2/attention/self/Reshape_2" [id=795, type=Reshape]; +"796 bert/encoder/layer_2/attention/self/transpose_2" [id=796, type=Transpose]; +"797 QuantizeLinear_bert/encoder/layer_2/attention/self/query/kernel^0_1" [id=797, label="797 QuantizeLinear_bert/encoder/layer_2/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"798 DequantizeLinear_bert/encoder/layer_2/attention/self/query/kernel^0_1" [id=798, label="798 DequantizeLinear_bert/encoder/layer_2/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"799 bert/encoder/layer_2/attention/self/query/MatMul" [id=799, type=MatMul]; +"800 bert/encoder/layer_2/attention/self/query/BiasAdd" [id=800, type=Add]; +"801 QuantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd^0_1" [id=801, label="801 QuantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"802 DequantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd^0_1" [id=802, label="802 DequantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"803 bert/encoder/layer_2/attention/self/Reshape" [id=803, type=Reshape]; +"804 bert/encoder/layer_2/attention/self/transpose" [id=804, type=Transpose]; +"805 QuantizeLinear_bert/encoder/layer_2/attention/self/key/kernel^0_1" [id=805, label="805 QuantizeLinear_bert/encoder/layer_2/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"806 DequantizeLinear_bert/encoder/layer_2/attention/self/key/kernel^0_1" [id=806, label="806 DequantizeLinear_bert/encoder/layer_2/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"807 bert/encoder/layer_2/attention/self/key/MatMul" [id=807, type=MatMul]; +"808 bert/encoder/layer_2/attention/self/key/BiasAdd" [id=808, type=Add]; +"809 QuantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd^0_1" [id=809, label="809 QuantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"810 DequantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd^0_1" [id=810, label="810 DequantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"811 bert/encoder/layer_2/attention/self/Reshape_1" [id=811, type=Reshape]; +"812 bert/encoder/layer_2/attention/self/transpose_1" [id=812, type=Transpose]; +"813 bert/encoder/layer_2/attention/self/MatMul__334" [id=813, type=Transpose]; +"814 bert/encoder/layer_2/attention/self/MatMul" [id=814, type=MatMul]; +"815 bert/encoder/layer_2/attention/self/Mul" [id=815, type=Mul]; +"816 bert/encoder/layer_2/attention/self/add" [id=816, type=Add]; +"817 Shape_nncf_739" [id=817, type=Shape]; +"818 Flatten_nncf_740" [id=818, type=Flatten]; +"819 bert/encoder/layer_2/attention/self/Softmax" [id=819, type=Softmax]; +"820 Reshape_nncf_742" [id=820, type=Reshape]; +"821 bert/encoder/layer_2/attention/self/MatMul_1" [id=821, type=MatMul]; +"822 QuantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1^0_1" [id=822, label="822 QuantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"823 DequantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1^0_1" [id=823, label="823 DequantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"824 bert/encoder/layer_2/attention/self/transpose_3" [id=824, type=Transpose]; +"825 bert/encoder/layer_2/attention/self/Reshape_3" [id=825, type=Reshape]; +"826 QuantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel^0_1" [id=826, label="826 
QuantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"827 DequantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel^0_1" [id=827, label="827 DequantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"828 bert/encoder/layer_2/attention/output/dense/MatMul" [id=828, type=MatMul]; +"829 bert/encoder/layer_2/attention/output/dense/BiasAdd" [id=829, type=Add]; +"830 bert/encoder/layer_2/attention/output/add" [id=830, type=Add]; +"831 bert/encoder/layer_2/attention/output/LayerNorm/moments/mean" [id=831, type=ReduceMean]; +"832 bert/encoder/layer_2/attention/output/LayerNorm/moments/StopGradient" [id=832, type=Identity]; +"833 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference" [id=833, type=Sub]; +"834 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference__337" [id=834, type=Mul]; +"835 bert/encoder/layer_2/attention/output/LayerNorm/moments/variance" [id=835, type=ReduceMean]; +"836 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add" [id=836, type=Add]; +"837 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/Rsqrt" [id=837, type=Sqrt]; +"838 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/Rsqrt__339" [id=838, type=Reciprocal]; +"839 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul" [id=839, type=Mul]; +"840 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_2" [id=840, type=Mul]; +"841 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/sub" [id=841, type=Sub]; +"842 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_1" [id=842, type=Mul]; +"843 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1" [id=843, type=Add]; +"844 QuantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=844, label="844 QuantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"845 DequantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=845, label="845 DequantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"846 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel^0_1" [id=846, label="846 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"847 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel^0_1" [id=847, label="847 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"848 bert/encoder/layer_2/intermediate/dense/MatMul" [id=848, type=MatMul]; +"849 bert/encoder/layer_2/intermediate/dense/BiasAdd" [id=849, type=Add]; +"850 bert/encoder/layer_2/intermediate/dense/Pow" [id=850, type=Pow]; +"851 bert/encoder/layer_2/intermediate/dense/mul" [id=851, type=Mul]; +"852 bert/encoder/layer_2/intermediate/dense/add" [id=852, type=Add]; +"853 bert/encoder/layer_2/intermediate/dense/mul_1" [id=853, type=Mul]; +"854 bert/encoder/layer_2/intermediate/dense/Tanh" [id=854, type=Tanh]; +"855 bert/encoder/layer_2/intermediate/dense/add_1" [id=855, type=Add]; +"856 bert/encoder/layer_2/intermediate/dense/mul_2" [id=856, type=Mul]; +"857 bert/encoder/layer_2/intermediate/dense/mul_3" [id=857, type=Mul]; +"858 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3^0_1" [id=858, label="858 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"859 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3^0_1" 
[id=859, label="859 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"860 QuantizeLinear_bert/encoder/layer_2/output/dense/kernel^0_1" [id=860, label="860 QuantizeLinear_bert/encoder/layer_2/output/dense/kernel:0_1", type=QuantizeLinear]; +"861 DequantizeLinear_bert/encoder/layer_2/output/dense/kernel^0_1" [id=861, label="861 DequantizeLinear_bert/encoder/layer_2/output/dense/kernel:0_1", type=DequantizeLinear]; +"862 bert/encoder/layer_2/output/dense/MatMul" [id=862, type=MatMul]; +"863 bert/encoder/layer_2/output/dense/BiasAdd" [id=863, type=Add]; +"864 bert/encoder/layer_2/output/add" [id=864, type=Add]; +"865 bert/encoder/layer_2/output/LayerNorm/moments/mean" [id=865, type=ReduceMean]; +"866 bert/encoder/layer_2/output/LayerNorm/moments/StopGradient" [id=866, type=Identity]; +"867 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference" [id=867, type=Sub]; +"868 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference__341" [id=868, type=Mul]; +"869 bert/encoder/layer_2/output/LayerNorm/moments/variance" [id=869, type=ReduceMean]; +"870 bert/encoder/layer_2/output/LayerNorm/batchnorm/add" [id=870, type=Add]; +"871 bert/encoder/layer_2/output/LayerNorm/batchnorm/Rsqrt" [id=871, type=Sqrt]; +"872 bert/encoder/layer_2/output/LayerNorm/batchnorm/Rsqrt__343" [id=872, type=Reciprocal]; +"873 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul" [id=873, type=Mul]; +"874 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_2" [id=874, type=Mul]; +"875 bert/encoder/layer_2/output/LayerNorm/batchnorm/sub" [id=875, type=Sub]; +"876 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_1" [id=876, type=Mul]; +"877 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" [id=877, type=Add]; +"878 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_1" [id=878, label="878 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"879 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_1" [id=879, label="879 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"880 QuantizeLinear_bert/encoder/layer_3/attention/self/value/kernel^0_1" [id=880, label="880 QuantizeLinear_bert/encoder/layer_3/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"881 DequantizeLinear_bert/encoder/layer_3/attention/self/value/kernel^0_1" [id=881, label="881 DequantizeLinear_bert/encoder/layer_3/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"882 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_2" [id=882, label="882 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"883 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_2" [id=883, label="883 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"884 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_3" [id=884, label="884 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"885 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_3" [id=885, label="885 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"886 bert/encoder/layer_3/attention/self/value/MatMul" [id=886, type=MatMul]; +"887 bert/encoder/layer_3/attention/self/value/BiasAdd" [id=887, type=Add]; +"888 
bert/encoder/layer_3/attention/self/Reshape_2" [id=888, type=Reshape]; +"889 bert/encoder/layer_3/attention/self/transpose_2" [id=889, type=Transpose]; +"890 QuantizeLinear_bert/encoder/layer_3/attention/self/query/kernel^0_1" [id=890, label="890 QuantizeLinear_bert/encoder/layer_3/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"891 DequantizeLinear_bert/encoder/layer_3/attention/self/query/kernel^0_1" [id=891, label="891 DequantizeLinear_bert/encoder/layer_3/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"892 bert/encoder/layer_3/attention/self/query/MatMul" [id=892, type=MatMul]; +"893 bert/encoder/layer_3/attention/self/query/BiasAdd" [id=893, type=Add]; +"894 QuantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd^0_1" [id=894, label="894 QuantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"895 DequantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd^0_1" [id=895, label="895 DequantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"896 bert/encoder/layer_3/attention/self/Reshape" [id=896, type=Reshape]; +"897 bert/encoder/layer_3/attention/self/transpose" [id=897, type=Transpose]; +"898 QuantizeLinear_bert/encoder/layer_3/attention/self/key/kernel^0_1" [id=898, label="898 QuantizeLinear_bert/encoder/layer_3/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"899 DequantizeLinear_bert/encoder/layer_3/attention/self/key/kernel^0_1" [id=899, label="899 DequantizeLinear_bert/encoder/layer_3/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"900 bert/encoder/layer_3/attention/self/key/MatMul" [id=900, type=MatMul]; +"901 bert/encoder/layer_3/attention/self/key/BiasAdd" [id=901, type=Add]; +"902 QuantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd^0_1" [id=902, label="902 QuantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"903 DequantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd^0_1" [id=903, label="903 DequantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"904 bert/encoder/layer_3/attention/self/Reshape_1" [id=904, type=Reshape]; +"905 bert/encoder/layer_3/attention/self/transpose_1" [id=905, type=Transpose]; +"906 bert/encoder/layer_3/attention/self/MatMul__348" [id=906, type=Transpose]; +"907 bert/encoder/layer_3/attention/self/MatMul" [id=907, type=MatMul]; +"908 bert/encoder/layer_3/attention/self/Mul" [id=908, type=Mul]; +"909 bert/encoder/layer_3/attention/self/add" [id=909, type=Add]; +"910 Shape_nncf_804" [id=910, type=Shape]; +"911 Flatten_nncf_805" [id=911, type=Flatten]; +"912 bert/encoder/layer_3/attention/self/Softmax" [id=912, type=Softmax]; +"913 Reshape_nncf_807" [id=913, type=Reshape]; +"914 bert/encoder/layer_3/attention/self/MatMul_1" [id=914, type=MatMul]; +"915 QuantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1^0_1" [id=915, label="915 QuantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"916 DequantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1^0_1" [id=916, label="916 DequantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"917 bert/encoder/layer_3/attention/self/transpose_3" [id=917, type=Transpose]; +"918 bert/encoder/layer_3/attention/self/Reshape_3" [id=918, type=Reshape]; +"919 QuantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel^0_1" [id=919, label="919 QuantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel:0_1", 
type=QuantizeLinear]; +"920 DequantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel^0_1" [id=920, label="920 DequantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"921 bert/encoder/layer_3/attention/output/dense/MatMul" [id=921, type=MatMul]; +"922 bert/encoder/layer_3/attention/output/dense/BiasAdd" [id=922, type=Add]; +"923 bert/encoder/layer_3/attention/output/add" [id=923, type=Add]; +"924 bert/encoder/layer_3/attention/output/LayerNorm/moments/mean" [id=924, type=ReduceMean]; +"925 bert/encoder/layer_3/attention/output/LayerNorm/moments/StopGradient" [id=925, type=Identity]; +"926 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference" [id=926, type=Sub]; +"927 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference__351" [id=927, type=Mul]; +"928 bert/encoder/layer_3/attention/output/LayerNorm/moments/variance" [id=928, type=ReduceMean]; +"929 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add" [id=929, type=Add]; +"930 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/Rsqrt" [id=930, type=Sqrt]; +"931 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/Rsqrt__353" [id=931, type=Reciprocal]; +"932 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul" [id=932, type=Mul]; +"933 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_2" [id=933, type=Mul]; +"934 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/sub" [id=934, type=Sub]; +"935 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_1" [id=935, type=Mul]; +"936 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1" [id=936, type=Add]; +"937 QuantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=937, label="937 QuantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"938 DequantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=938, label="938 DequantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"939 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel^0_1" [id=939, label="939 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"940 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel^0_1" [id=940, label="940 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"941 bert/encoder/layer_3/intermediate/dense/MatMul" [id=941, type=MatMul]; +"942 bert/encoder/layer_3/intermediate/dense/BiasAdd" [id=942, type=Add]; +"943 bert/encoder/layer_3/intermediate/dense/Pow" [id=943, type=Pow]; +"944 bert/encoder/layer_3/intermediate/dense/mul" [id=944, type=Mul]; +"945 bert/encoder/layer_3/intermediate/dense/add" [id=945, type=Add]; +"946 bert/encoder/layer_3/intermediate/dense/mul_1" [id=946, type=Mul]; +"947 bert/encoder/layer_3/intermediate/dense/Tanh" [id=947, type=Tanh]; +"948 bert/encoder/layer_3/intermediate/dense/add_1" [id=948, type=Add]; +"949 bert/encoder/layer_3/intermediate/dense/mul_2" [id=949, type=Mul]; +"950 bert/encoder/layer_3/intermediate/dense/mul_3" [id=950, type=Mul]; +"951 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3^0_1" [id=951, label="951 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"952 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3^0_1" [id=952, label="952 
DequantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"953 QuantizeLinear_bert/encoder/layer_3/output/dense/kernel^0_1" [id=953, label="953 QuantizeLinear_bert/encoder/layer_3/output/dense/kernel:0_1", type=QuantizeLinear]; +"954 DequantizeLinear_bert/encoder/layer_3/output/dense/kernel^0_1" [id=954, label="954 DequantizeLinear_bert/encoder/layer_3/output/dense/kernel:0_1", type=DequantizeLinear]; +"955 bert/encoder/layer_3/output/dense/MatMul" [id=955, type=MatMul]; +"956 bert/encoder/layer_3/output/dense/BiasAdd" [id=956, type=Add]; +"957 bert/encoder/layer_3/output/add" [id=957, type=Add]; +"958 bert/encoder/layer_3/output/LayerNorm/moments/mean" [id=958, type=ReduceMean]; +"959 bert/encoder/layer_3/output/LayerNorm/moments/StopGradient" [id=959, type=Identity]; +"960 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference" [id=960, type=Sub]; +"961 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference__355" [id=961, type=Mul]; +"962 bert/encoder/layer_3/output/LayerNorm/moments/variance" [id=962, type=ReduceMean]; +"963 bert/encoder/layer_3/output/LayerNorm/batchnorm/add" [id=963, type=Add]; +"964 bert/encoder/layer_3/output/LayerNorm/batchnorm/Rsqrt" [id=964, type=Sqrt]; +"965 bert/encoder/layer_3/output/LayerNorm/batchnorm/Rsqrt__357" [id=965, type=Reciprocal]; +"966 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul" [id=966, type=Mul]; +"967 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_2" [id=967, type=Mul]; +"968 bert/encoder/layer_3/output/LayerNorm/batchnorm/sub" [id=968, type=Sub]; +"969 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_1" [id=969, type=Mul]; +"970 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" [id=970, type=Add]; +"971 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_1" [id=971, label="971 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"972 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_1" [id=972, label="972 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"973 QuantizeLinear_bert/encoder/layer_4/attention/self/value/kernel^0_1" [id=973, label="973 QuantizeLinear_bert/encoder/layer_4/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"974 DequantizeLinear_bert/encoder/layer_4/attention/self/value/kernel^0_1" [id=974, label="974 DequantizeLinear_bert/encoder/layer_4/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"975 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_2" [id=975, label="975 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"976 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_2" [id=976, label="976 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"977 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_3" [id=977, label="977 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"978 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_3" [id=978, label="978 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"979 bert/encoder/layer_4/attention/self/value/MatMul" [id=979, type=MatMul]; +"980 bert/encoder/layer_4/attention/self/value/BiasAdd" [id=980, type=Add]; +"981 bert/encoder/layer_4/attention/self/Reshape_2" [id=981, 
type=Reshape]; +"982 bert/encoder/layer_4/attention/self/transpose_2" [id=982, type=Transpose]; +"983 QuantizeLinear_bert/encoder/layer_4/attention/self/query/kernel^0_1" [id=983, label="983 QuantizeLinear_bert/encoder/layer_4/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"984 DequantizeLinear_bert/encoder/layer_4/attention/self/query/kernel^0_1" [id=984, label="984 DequantizeLinear_bert/encoder/layer_4/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"985 bert/encoder/layer_4/attention/self/query/MatMul" [id=985, type=MatMul]; +"986 bert/encoder/layer_4/attention/self/query/BiasAdd" [id=986, type=Add]; +"987 QuantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd^0_1" [id=987, label="987 QuantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"988 DequantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd^0_1" [id=988, label="988 DequantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"989 bert/encoder/layer_4/attention/self/Reshape" [id=989, type=Reshape]; +"990 bert/encoder/layer_4/attention/self/transpose" [id=990, type=Transpose]; +"991 QuantizeLinear_bert/encoder/layer_4/attention/self/key/kernel^0_1" [id=991, label="991 QuantizeLinear_bert/encoder/layer_4/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"992 DequantizeLinear_bert/encoder/layer_4/attention/self/key/kernel^0_1" [id=992, label="992 DequantizeLinear_bert/encoder/layer_4/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"993 bert/encoder/layer_4/attention/self/key/MatMul" [id=993, type=MatMul]; +"994 bert/encoder/layer_4/attention/self/key/BiasAdd" [id=994, type=Add]; +"995 QuantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd^0_1" [id=995, label="995 QuantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"996 DequantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd^0_1" [id=996, label="996 DequantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"997 bert/encoder/layer_4/attention/self/Reshape_1" [id=997, type=Reshape]; +"998 bert/encoder/layer_4/attention/self/transpose_1" [id=998, type=Transpose]; +"999 bert/encoder/layer_4/attention/self/MatMul__362" [id=999, type=Transpose]; +"1000 bert/encoder/layer_4/attention/self/MatMul" [id=1000, type=MatMul]; +"1001 bert/encoder/layer_4/attention/self/Mul" [id=1001, type=Mul]; +"1002 bert/encoder/layer_4/attention/self/add" [id=1002, type=Add]; +"1003 Shape_nncf_869" [id=1003, type=Shape]; +"1004 Flatten_nncf_870" [id=1004, type=Flatten]; +"1005 bert/encoder/layer_4/attention/self/Softmax" [id=1005, type=Softmax]; +"1006 Reshape_nncf_872" [id=1006, type=Reshape]; +"1007 bert/encoder/layer_4/attention/self/MatMul_1" [id=1007, type=MatMul]; +"1008 QuantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1^0_1" [id=1008, label="1008 QuantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"1009 DequantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1^0_1" [id=1009, label="1009 DequantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"1010 bert/encoder/layer_4/attention/self/transpose_3" [id=1010, type=Transpose]; +"1011 bert/encoder/layer_4/attention/self/Reshape_3" [id=1011, type=Reshape]; +"1012 QuantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel^0_1" [id=1012, label="1012 QuantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"1013 
DequantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel^0_1" [id=1013, label="1013 DequantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"1014 bert/encoder/layer_4/attention/output/dense/MatMul" [id=1014, type=MatMul]; +"1015 bert/encoder/layer_4/attention/output/dense/BiasAdd" [id=1015, type=Add]; +"1016 bert/encoder/layer_4/attention/output/add" [id=1016, type=Add]; +"1017 bert/encoder/layer_4/attention/output/LayerNorm/moments/mean" [id=1017, type=ReduceMean]; +"1018 bert/encoder/layer_4/attention/output/LayerNorm/moments/StopGradient" [id=1018, type=Identity]; +"1019 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference" [id=1019, type=Sub]; +"1020 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference__365" [id=1020, type=Mul]; +"1021 bert/encoder/layer_4/attention/output/LayerNorm/moments/variance" [id=1021, type=ReduceMean]; +"1022 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add" [id=1022, type=Add]; +"1023 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/Rsqrt" [id=1023, type=Sqrt]; +"1024 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/Rsqrt__367" [id=1024, type=Reciprocal]; +"1025 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul" [id=1025, type=Mul]; +"1026 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_2" [id=1026, type=Mul]; +"1027 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/sub" [id=1027, type=Sub]; +"1028 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_1" [id=1028, type=Mul]; +"1029 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1" [id=1029, type=Add]; +"1030 QuantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1030, label="1030 QuantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1031 DequantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1031, label="1031 DequantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1032 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel^0_1" [id=1032, label="1032 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"1033 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel^0_1" [id=1033, label="1033 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"1034 bert/encoder/layer_4/intermediate/dense/MatMul" [id=1034, type=MatMul]; +"1035 bert/encoder/layer_4/intermediate/dense/BiasAdd" [id=1035, type=Add]; +"1036 bert/encoder/layer_4/intermediate/dense/Pow" [id=1036, type=Pow]; +"1037 bert/encoder/layer_4/intermediate/dense/mul" [id=1037, type=Mul]; +"1038 bert/encoder/layer_4/intermediate/dense/add" [id=1038, type=Add]; +"1039 bert/encoder/layer_4/intermediate/dense/mul_1" [id=1039, type=Mul]; +"1040 bert/encoder/layer_4/intermediate/dense/Tanh" [id=1040, type=Tanh]; +"1041 bert/encoder/layer_4/intermediate/dense/add_1" [id=1041, type=Add]; +"1042 bert/encoder/layer_4/intermediate/dense/mul_2" [id=1042, type=Mul]; +"1043 bert/encoder/layer_4/intermediate/dense/mul_3" [id=1043, type=Mul]; +"1044 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3^0_1" [id=1044, label="1044 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"1045 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3^0_1" [id=1045, label="1045 
DequantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"1046 QuantizeLinear_bert/encoder/layer_4/output/dense/kernel^0_1" [id=1046, label="1046 QuantizeLinear_bert/encoder/layer_4/output/dense/kernel:0_1", type=QuantizeLinear]; +"1047 DequantizeLinear_bert/encoder/layer_4/output/dense/kernel^0_1" [id=1047, label="1047 DequantizeLinear_bert/encoder/layer_4/output/dense/kernel:0_1", type=DequantizeLinear]; +"1048 bert/encoder/layer_4/output/dense/MatMul" [id=1048, type=MatMul]; +"1049 bert/encoder/layer_4/output/dense/BiasAdd" [id=1049, type=Add]; +"1050 bert/encoder/layer_4/output/add" [id=1050, type=Add]; +"1051 bert/encoder/layer_4/output/LayerNorm/moments/mean" [id=1051, type=ReduceMean]; +"1052 bert/encoder/layer_4/output/LayerNorm/moments/StopGradient" [id=1052, type=Identity]; +"1053 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference" [id=1053, type=Sub]; +"1054 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference__369" [id=1054, type=Mul]; +"1055 bert/encoder/layer_4/output/LayerNorm/moments/variance" [id=1055, type=ReduceMean]; +"1056 bert/encoder/layer_4/output/LayerNorm/batchnorm/add" [id=1056, type=Add]; +"1057 bert/encoder/layer_4/output/LayerNorm/batchnorm/Rsqrt" [id=1057, type=Sqrt]; +"1058 bert/encoder/layer_4/output/LayerNorm/batchnorm/Rsqrt__371" [id=1058, type=Reciprocal]; +"1059 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul" [id=1059, type=Mul]; +"1060 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_2" [id=1060, type=Mul]; +"1061 bert/encoder/layer_4/output/LayerNorm/batchnorm/sub" [id=1061, type=Sub]; +"1062 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_1" [id=1062, type=Mul]; +"1063 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" [id=1063, type=Add]; +"1064 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_1" [id=1064, label="1064 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1065 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_1" [id=1065, label="1065 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1066 QuantizeLinear_bert/encoder/layer_5/attention/self/value/kernel^0_1" [id=1066, label="1066 QuantizeLinear_bert/encoder/layer_5/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"1067 DequantizeLinear_bert/encoder/layer_5/attention/self/value/kernel^0_1" [id=1067, label="1067 DequantizeLinear_bert/encoder/layer_5/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"1068 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_2" [id=1068, label="1068 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"1069 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_2" [id=1069, label="1069 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"1070 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_3" [id=1070, label="1070 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"1071 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_3" [id=1071, label="1071 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"1072 bert/encoder/layer_5/attention/self/value/MatMul" [id=1072, type=MatMul]; +"1073 bert/encoder/layer_5/attention/self/value/BiasAdd" [id=1073, type=Add]; 
+"1074 bert/encoder/layer_5/attention/self/Reshape_2" [id=1074, type=Reshape]; +"1075 bert/encoder/layer_5/attention/self/transpose_2" [id=1075, type=Transpose]; +"1076 QuantizeLinear_bert/encoder/layer_5/attention/self/query/kernel^0_1" [id=1076, label="1076 QuantizeLinear_bert/encoder/layer_5/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"1077 DequantizeLinear_bert/encoder/layer_5/attention/self/query/kernel^0_1" [id=1077, label="1077 DequantizeLinear_bert/encoder/layer_5/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"1078 bert/encoder/layer_5/attention/self/query/MatMul" [id=1078, type=MatMul]; +"1079 bert/encoder/layer_5/attention/self/query/BiasAdd" [id=1079, type=Add]; +"1080 QuantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd^0_1" [id=1080, label="1080 QuantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"1081 DequantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd^0_1" [id=1081, label="1081 DequantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"1082 bert/encoder/layer_5/attention/self/Reshape" [id=1082, type=Reshape]; +"1083 bert/encoder/layer_5/attention/self/transpose" [id=1083, type=Transpose]; +"1084 QuantizeLinear_bert/encoder/layer_5/attention/self/key/kernel^0_1" [id=1084, label="1084 QuantizeLinear_bert/encoder/layer_5/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"1085 DequantizeLinear_bert/encoder/layer_5/attention/self/key/kernel^0_1" [id=1085, label="1085 DequantizeLinear_bert/encoder/layer_5/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"1086 bert/encoder/layer_5/attention/self/key/MatMul" [id=1086, type=MatMul]; +"1087 bert/encoder/layer_5/attention/self/key/BiasAdd" [id=1087, type=Add]; +"1088 QuantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd^0_1" [id=1088, label="1088 QuantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"1089 DequantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd^0_1" [id=1089, label="1089 DequantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"1090 bert/encoder/layer_5/attention/self/Reshape_1" [id=1090, type=Reshape]; +"1091 bert/encoder/layer_5/attention/self/transpose_1" [id=1091, type=Transpose]; +"1092 bert/encoder/layer_5/attention/self/MatMul__376" [id=1092, type=Transpose]; +"1093 bert/encoder/layer_5/attention/self/MatMul" [id=1093, type=MatMul]; +"1094 bert/encoder/layer_5/attention/self/Mul" [id=1094, type=Mul]; +"1095 bert/encoder/layer_5/attention/self/add" [id=1095, type=Add]; +"1096 Shape_nncf_934" [id=1096, type=Shape]; +"1097 Flatten_nncf_935" [id=1097, type=Flatten]; +"1098 bert/encoder/layer_5/attention/self/Softmax" [id=1098, type=Softmax]; +"1099 Reshape_nncf_937" [id=1099, type=Reshape]; +"1100 bert/encoder/layer_5/attention/self/MatMul_1" [id=1100, type=MatMul]; +"1101 QuantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1^0_1" [id=1101, label="1101 QuantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"1102 DequantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1^0_1" [id=1102, label="1102 DequantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"1103 bert/encoder/layer_5/attention/self/transpose_3" [id=1103, type=Transpose]; +"1104 bert/encoder/layer_5/attention/self/Reshape_3" [id=1104, type=Reshape]; +"1105 QuantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel^0_1" [id=1105, 
label="1105 QuantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"1106 DequantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel^0_1" [id=1106, label="1106 DequantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"1107 bert/encoder/layer_5/attention/output/dense/MatMul" [id=1107, type=MatMul]; +"1108 bert/encoder/layer_5/attention/output/dense/BiasAdd" [id=1108, type=Add]; +"1109 bert/encoder/layer_5/attention/output/add" [id=1109, type=Add]; +"1110 bert/encoder/layer_5/attention/output/LayerNorm/moments/mean" [id=1110, type=ReduceMean]; +"1111 bert/encoder/layer_5/attention/output/LayerNorm/moments/StopGradient" [id=1111, type=Identity]; +"1112 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference" [id=1112, type=Sub]; +"1113 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference__379" [id=1113, type=Mul]; +"1114 bert/encoder/layer_5/attention/output/LayerNorm/moments/variance" [id=1114, type=ReduceMean]; +"1115 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add" [id=1115, type=Add]; +"1116 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/Rsqrt" [id=1116, type=Sqrt]; +"1117 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/Rsqrt__381" [id=1117, type=Reciprocal]; +"1118 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul" [id=1118, type=Mul]; +"1119 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_2" [id=1119, type=Mul]; +"1120 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/sub" [id=1120, type=Sub]; +"1121 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_1" [id=1121, type=Mul]; +"1122 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1" [id=1122, type=Add]; +"1123 QuantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1123, label="1123 QuantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1124 DequantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1124, label="1124 DequantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1125 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel^0_1" [id=1125, label="1125 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"1126 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel^0_1" [id=1126, label="1126 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"1127 bert/encoder/layer_5/intermediate/dense/MatMul" [id=1127, type=MatMul]; +"1128 bert/encoder/layer_5/intermediate/dense/BiasAdd" [id=1128, type=Add]; +"1129 bert/encoder/layer_5/intermediate/dense/Pow" [id=1129, type=Pow]; +"1130 bert/encoder/layer_5/intermediate/dense/mul" [id=1130, type=Mul]; +"1131 bert/encoder/layer_5/intermediate/dense/add" [id=1131, type=Add]; +"1132 bert/encoder/layer_5/intermediate/dense/mul_1" [id=1132, type=Mul]; +"1133 bert/encoder/layer_5/intermediate/dense/Tanh" [id=1133, type=Tanh]; +"1134 bert/encoder/layer_5/intermediate/dense/add_1" [id=1134, type=Add]; +"1135 bert/encoder/layer_5/intermediate/dense/mul_2" [id=1135, type=Mul]; +"1136 bert/encoder/layer_5/intermediate/dense/mul_3" [id=1136, type=Mul]; +"1137 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3^0_1" [id=1137, label="1137 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3:0_1", 
type=QuantizeLinear]; +"1138 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3^0_1" [id=1138, label="1138 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"1139 QuantizeLinear_bert/encoder/layer_5/output/dense/kernel^0_1" [id=1139, label="1139 QuantizeLinear_bert/encoder/layer_5/output/dense/kernel:0_1", type=QuantizeLinear]; +"1140 DequantizeLinear_bert/encoder/layer_5/output/dense/kernel^0_1" [id=1140, label="1140 DequantizeLinear_bert/encoder/layer_5/output/dense/kernel:0_1", type=DequantizeLinear]; +"1141 bert/encoder/layer_5/output/dense/MatMul" [id=1141, type=MatMul]; +"1142 bert/encoder/layer_5/output/dense/BiasAdd" [id=1142, type=Add]; +"1143 bert/encoder/layer_5/output/add" [id=1143, type=Add]; +"1144 bert/encoder/layer_5/output/LayerNorm/moments/mean" [id=1144, type=ReduceMean]; +"1145 bert/encoder/layer_5/output/LayerNorm/moments/StopGradient" [id=1145, type=Identity]; +"1146 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference" [id=1146, type=Sub]; +"1147 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference__383" [id=1147, type=Mul]; +"1148 bert/encoder/layer_5/output/LayerNorm/moments/variance" [id=1148, type=ReduceMean]; +"1149 bert/encoder/layer_5/output/LayerNorm/batchnorm/add" [id=1149, type=Add]; +"1150 bert/encoder/layer_5/output/LayerNorm/batchnorm/Rsqrt" [id=1150, type=Sqrt]; +"1151 bert/encoder/layer_5/output/LayerNorm/batchnorm/Rsqrt__385" [id=1151, type=Reciprocal]; +"1152 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul" [id=1152, type=Mul]; +"1153 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_2" [id=1153, type=Mul]; +"1154 bert/encoder/layer_5/output/LayerNorm/batchnorm/sub" [id=1154, type=Sub]; +"1155 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_1" [id=1155, type=Mul]; +"1156 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" [id=1156, type=Add]; +"1157 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_1" [id=1157, label="1157 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1158 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_1" [id=1158, label="1158 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1159 QuantizeLinear_bert/encoder/layer_6/attention/self/value/kernel^0_1" [id=1159, label="1159 QuantizeLinear_bert/encoder/layer_6/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"1160 DequantizeLinear_bert/encoder/layer_6/attention/self/value/kernel^0_1" [id=1160, label="1160 DequantizeLinear_bert/encoder/layer_6/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"1161 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_2" [id=1161, label="1161 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"1162 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_2" [id=1162, label="1162 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"1163 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_3" [id=1163, label="1163 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"1164 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_3" [id=1164, label="1164 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"1165 
bert/encoder/layer_6/attention/self/value/MatMul" [id=1165, type=MatMul]; +"1166 bert/encoder/layer_6/attention/self/value/BiasAdd" [id=1166, type=Add]; +"1167 bert/encoder/layer_6/attention/self/Reshape_2" [id=1167, type=Reshape]; +"1168 bert/encoder/layer_6/attention/self/transpose_2" [id=1168, type=Transpose]; +"1169 QuantizeLinear_bert/encoder/layer_6/attention/self/query/kernel^0_1" [id=1169, label="1169 QuantizeLinear_bert/encoder/layer_6/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"1170 DequantizeLinear_bert/encoder/layer_6/attention/self/query/kernel^0_1" [id=1170, label="1170 DequantizeLinear_bert/encoder/layer_6/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"1171 bert/encoder/layer_6/attention/self/query/MatMul" [id=1171, type=MatMul]; +"1172 bert/encoder/layer_6/attention/self/query/BiasAdd" [id=1172, type=Add]; +"1173 QuantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd^0_1" [id=1173, label="1173 QuantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"1174 DequantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd^0_1" [id=1174, label="1174 DequantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"1175 bert/encoder/layer_6/attention/self/Reshape" [id=1175, type=Reshape]; +"1176 bert/encoder/layer_6/attention/self/transpose" [id=1176, type=Transpose]; +"1177 QuantizeLinear_bert/encoder/layer_6/attention/self/key/kernel^0_1" [id=1177, label="1177 QuantizeLinear_bert/encoder/layer_6/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"1178 DequantizeLinear_bert/encoder/layer_6/attention/self/key/kernel^0_1" [id=1178, label="1178 DequantizeLinear_bert/encoder/layer_6/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"1179 bert/encoder/layer_6/attention/self/key/MatMul" [id=1179, type=MatMul]; +"1180 bert/encoder/layer_6/attention/self/key/BiasAdd" [id=1180, type=Add]; +"1181 QuantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd^0_1" [id=1181, label="1181 QuantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"1182 DequantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd^0_1" [id=1182, label="1182 DequantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"1183 bert/encoder/layer_6/attention/self/Reshape_1" [id=1183, type=Reshape]; +"1184 bert/encoder/layer_6/attention/self/transpose_1" [id=1184, type=Transpose]; +"1185 bert/encoder/layer_6/attention/self/MatMul__390" [id=1185, type=Transpose]; +"1186 bert/encoder/layer_6/attention/self/MatMul" [id=1186, type=MatMul]; +"1187 bert/encoder/layer_6/attention/self/Mul" [id=1187, type=Mul]; +"1188 bert/encoder/layer_6/attention/self/add" [id=1188, type=Add]; +"1189 Shape_nncf_999" [id=1189, type=Shape]; +"1190 Flatten_nncf_1000" [id=1190, type=Flatten]; +"1191 bert/encoder/layer_6/attention/self/Softmax" [id=1191, type=Softmax]; +"1192 Reshape_nncf_1002" [id=1192, type=Reshape]; +"1193 bert/encoder/layer_6/attention/self/MatMul_1" [id=1193, type=MatMul]; +"1194 QuantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1^0_1" [id=1194, label="1194 QuantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"1195 DequantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1^0_1" [id=1195, label="1195 DequantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"1196 bert/encoder/layer_6/attention/self/transpose_3" [id=1196, type=Transpose]; +"1197 
bert/encoder/layer_6/attention/self/Reshape_3" [id=1197, type=Reshape]; +"1198 QuantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel^0_1" [id=1198, label="1198 QuantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"1199 DequantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel^0_1" [id=1199, label="1199 DequantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"1200 bert/encoder/layer_6/attention/output/dense/MatMul" [id=1200, type=MatMul]; +"1201 bert/encoder/layer_6/attention/output/dense/BiasAdd" [id=1201, type=Add]; +"1202 bert/encoder/layer_6/attention/output/add" [id=1202, type=Add]; +"1203 bert/encoder/layer_6/attention/output/LayerNorm/moments/mean" [id=1203, type=ReduceMean]; +"1204 bert/encoder/layer_6/attention/output/LayerNorm/moments/StopGradient" [id=1204, type=Identity]; +"1205 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference" [id=1205, type=Sub]; +"1206 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference__393" [id=1206, type=Mul]; +"1207 bert/encoder/layer_6/attention/output/LayerNorm/moments/variance" [id=1207, type=ReduceMean]; +"1208 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add" [id=1208, type=Add]; +"1209 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/Rsqrt" [id=1209, type=Sqrt]; +"1210 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/Rsqrt__395" [id=1210, type=Reciprocal]; +"1211 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul" [id=1211, type=Mul]; +"1212 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_2" [id=1212, type=Mul]; +"1213 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/sub" [id=1213, type=Sub]; +"1214 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_1" [id=1214, type=Mul]; +"1215 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1" [id=1215, type=Add]; +"1216 QuantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1216, label="1216 QuantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1217 DequantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1217, label="1217 DequantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1218 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel^0_1" [id=1218, label="1218 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"1219 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel^0_1" [id=1219, label="1219 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"1220 bert/encoder/layer_6/intermediate/dense/MatMul" [id=1220, type=MatMul]; +"1221 bert/encoder/layer_6/intermediate/dense/BiasAdd" [id=1221, type=Add]; +"1222 bert/encoder/layer_6/intermediate/dense/Pow" [id=1222, type=Pow]; +"1223 bert/encoder/layer_6/intermediate/dense/mul" [id=1223, type=Mul]; +"1224 bert/encoder/layer_6/intermediate/dense/add" [id=1224, type=Add]; +"1225 bert/encoder/layer_6/intermediate/dense/mul_1" [id=1225, type=Mul]; +"1226 bert/encoder/layer_6/intermediate/dense/Tanh" [id=1226, type=Tanh]; +"1227 bert/encoder/layer_6/intermediate/dense/add_1" [id=1227, type=Add]; +"1228 bert/encoder/layer_6/intermediate/dense/mul_2" [id=1228, type=Mul]; +"1229 bert/encoder/layer_6/intermediate/dense/mul_3" [id=1229, type=Mul]; +"1230 
QuantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3^0_1" [id=1230, label="1230 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"1231 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3^0_1" [id=1231, label="1231 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"1232 QuantizeLinear_bert/encoder/layer_6/output/dense/kernel^0_1" [id=1232, label="1232 QuantizeLinear_bert/encoder/layer_6/output/dense/kernel:0_1", type=QuantizeLinear]; +"1233 DequantizeLinear_bert/encoder/layer_6/output/dense/kernel^0_1" [id=1233, label="1233 DequantizeLinear_bert/encoder/layer_6/output/dense/kernel:0_1", type=DequantizeLinear]; +"1234 bert/encoder/layer_6/output/dense/MatMul" [id=1234, type=MatMul]; +"1235 bert/encoder/layer_6/output/dense/BiasAdd" [id=1235, type=Add]; +"1236 bert/encoder/layer_6/output/add" [id=1236, type=Add]; +"1237 bert/encoder/layer_6/output/LayerNorm/moments/mean" [id=1237, type=ReduceMean]; +"1238 bert/encoder/layer_6/output/LayerNorm/moments/StopGradient" [id=1238, type=Identity]; +"1239 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference" [id=1239, type=Sub]; +"1240 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference__397" [id=1240, type=Mul]; +"1241 bert/encoder/layer_6/output/LayerNorm/moments/variance" [id=1241, type=ReduceMean]; +"1242 bert/encoder/layer_6/output/LayerNorm/batchnorm/add" [id=1242, type=Add]; +"1243 bert/encoder/layer_6/output/LayerNorm/batchnorm/Rsqrt" [id=1243, type=Sqrt]; +"1244 bert/encoder/layer_6/output/LayerNorm/batchnorm/Rsqrt__399" [id=1244, type=Reciprocal]; +"1245 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul" [id=1245, type=Mul]; +"1246 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_2" [id=1246, type=Mul]; +"1247 bert/encoder/layer_6/output/LayerNorm/batchnorm/sub" [id=1247, type=Sub]; +"1248 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_1" [id=1248, type=Mul]; +"1249 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" [id=1249, type=Add]; +"1250 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_1" [id=1250, label="1250 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1251 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_1" [id=1251, label="1251 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1252 QuantizeLinear_bert/encoder/layer_7/attention/self/value/kernel^0_1" [id=1252, label="1252 QuantizeLinear_bert/encoder/layer_7/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"1253 DequantizeLinear_bert/encoder/layer_7/attention/self/value/kernel^0_1" [id=1253, label="1253 DequantizeLinear_bert/encoder/layer_7/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"1254 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_2" [id=1254, label="1254 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"1255 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_2" [id=1255, label="1255 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"1256 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_3" [id=1256, label="1256 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"1257 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_3" [id=1257, 
label="1257 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"1258 bert/encoder/layer_7/attention/self/value/MatMul" [id=1258, type=MatMul]; +"1259 bert/encoder/layer_7/attention/self/value/BiasAdd" [id=1259, type=Add]; +"1260 bert/encoder/layer_7/attention/self/Reshape_2" [id=1260, type=Reshape]; +"1261 bert/encoder/layer_7/attention/self/transpose_2" [id=1261, type=Transpose]; +"1262 QuantizeLinear_bert/encoder/layer_7/attention/self/query/kernel^0_1" [id=1262, label="1262 QuantizeLinear_bert/encoder/layer_7/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"1263 DequantizeLinear_bert/encoder/layer_7/attention/self/query/kernel^0_1" [id=1263, label="1263 DequantizeLinear_bert/encoder/layer_7/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"1264 bert/encoder/layer_7/attention/self/query/MatMul" [id=1264, type=MatMul]; +"1265 bert/encoder/layer_7/attention/self/query/BiasAdd" [id=1265, type=Add]; +"1266 QuantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd^0_1" [id=1266, label="1266 QuantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"1267 DequantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd^0_1" [id=1267, label="1267 DequantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"1268 bert/encoder/layer_7/attention/self/Reshape" [id=1268, type=Reshape]; +"1269 bert/encoder/layer_7/attention/self/transpose" [id=1269, type=Transpose]; +"1270 QuantizeLinear_bert/encoder/layer_7/attention/self/key/kernel^0_1" [id=1270, label="1270 QuantizeLinear_bert/encoder/layer_7/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"1271 DequantizeLinear_bert/encoder/layer_7/attention/self/key/kernel^0_1" [id=1271, label="1271 DequantizeLinear_bert/encoder/layer_7/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"1272 bert/encoder/layer_7/attention/self/key/MatMul" [id=1272, type=MatMul]; +"1273 bert/encoder/layer_7/attention/self/key/BiasAdd" [id=1273, type=Add]; +"1274 QuantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd^0_1" [id=1274, label="1274 QuantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"1275 DequantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd^0_1" [id=1275, label="1275 DequantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"1276 bert/encoder/layer_7/attention/self/Reshape_1" [id=1276, type=Reshape]; +"1277 bert/encoder/layer_7/attention/self/transpose_1" [id=1277, type=Transpose]; +"1278 bert/encoder/layer_7/attention/self/MatMul__404" [id=1278, type=Transpose]; +"1279 bert/encoder/layer_7/attention/self/MatMul" [id=1279, type=MatMul]; +"1280 bert/encoder/layer_7/attention/self/Mul" [id=1280, type=Mul]; +"1281 bert/encoder/layer_7/attention/self/add" [id=1281, type=Add]; +"1282 Shape_nncf_1064" [id=1282, type=Shape]; +"1283 Flatten_nncf_1065" [id=1283, type=Flatten]; +"1284 bert/encoder/layer_7/attention/self/Softmax" [id=1284, type=Softmax]; +"1285 Reshape_nncf_1067" [id=1285, type=Reshape]; +"1286 bert/encoder/layer_7/attention/self/MatMul_1" [id=1286, type=MatMul]; +"1287 QuantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1^0_1" [id=1287, label="1287 QuantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"1288 DequantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1^0_1" [id=1288, label="1288 DequantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1:0_1", 
type=DequantizeLinear]; +"1289 bert/encoder/layer_7/attention/self/transpose_3" [id=1289, type=Transpose]; +"1290 bert/encoder/layer_7/attention/self/Reshape_3" [id=1290, type=Reshape]; +"1291 QuantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel^0_1" [id=1291, label="1291 QuantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"1292 DequantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel^0_1" [id=1292, label="1292 DequantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"1293 bert/encoder/layer_7/attention/output/dense/MatMul" [id=1293, type=MatMul]; +"1294 bert/encoder/layer_7/attention/output/dense/BiasAdd" [id=1294, type=Add]; +"1295 bert/encoder/layer_7/attention/output/add" [id=1295, type=Add]; +"1296 bert/encoder/layer_7/attention/output/LayerNorm/moments/mean" [id=1296, type=ReduceMean]; +"1297 bert/encoder/layer_7/attention/output/LayerNorm/moments/StopGradient" [id=1297, type=Identity]; +"1298 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference" [id=1298, type=Sub]; +"1299 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference__407" [id=1299, type=Mul]; +"1300 bert/encoder/layer_7/attention/output/LayerNorm/moments/variance" [id=1300, type=ReduceMean]; +"1301 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add" [id=1301, type=Add]; +"1302 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/Rsqrt" [id=1302, type=Sqrt]; +"1303 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/Rsqrt__409" [id=1303, type=Reciprocal]; +"1304 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul" [id=1304, type=Mul]; +"1305 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_2" [id=1305, type=Mul]; +"1306 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/sub" [id=1306, type=Sub]; +"1307 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_1" [id=1307, type=Mul]; +"1308 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1" [id=1308, type=Add]; +"1309 QuantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1309, label="1309 QuantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1310 DequantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1310, label="1310 DequantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1311 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel^0_1" [id=1311, label="1311 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"1312 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel^0_1" [id=1312, label="1312 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"1313 bert/encoder/layer_7/intermediate/dense/MatMul" [id=1313, type=MatMul]; +"1314 bert/encoder/layer_7/intermediate/dense/BiasAdd" [id=1314, type=Add]; +"1315 bert/encoder/layer_7/intermediate/dense/Pow" [id=1315, type=Pow]; +"1316 bert/encoder/layer_7/intermediate/dense/mul" [id=1316, type=Mul]; +"1317 bert/encoder/layer_7/intermediate/dense/add" [id=1317, type=Add]; +"1318 bert/encoder/layer_7/intermediate/dense/mul_1" [id=1318, type=Mul]; +"1319 bert/encoder/layer_7/intermediate/dense/Tanh" [id=1319, type=Tanh]; +"1320 bert/encoder/layer_7/intermediate/dense/add_1" [id=1320, type=Add]; +"1321 
bert/encoder/layer_7/intermediate/dense/mul_2" [id=1321, type=Mul]; +"1322 bert/encoder/layer_7/intermediate/dense/mul_3" [id=1322, type=Mul]; +"1323 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3^0_1" [id=1323, label="1323 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"1324 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3^0_1" [id=1324, label="1324 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"1325 QuantizeLinear_bert/encoder/layer_7/output/dense/kernel^0_1" [id=1325, label="1325 QuantizeLinear_bert/encoder/layer_7/output/dense/kernel:0_1", type=QuantizeLinear]; +"1326 DequantizeLinear_bert/encoder/layer_7/output/dense/kernel^0_1" [id=1326, label="1326 DequantizeLinear_bert/encoder/layer_7/output/dense/kernel:0_1", type=DequantizeLinear]; +"1327 bert/encoder/layer_7/output/dense/MatMul" [id=1327, type=MatMul]; +"1328 bert/encoder/layer_7/output/dense/BiasAdd" [id=1328, type=Add]; +"1329 bert/encoder/layer_7/output/add" [id=1329, type=Add]; +"1330 bert/encoder/layer_7/output/LayerNorm/moments/mean" [id=1330, type=ReduceMean]; +"1331 bert/encoder/layer_7/output/LayerNorm/moments/StopGradient" [id=1331, type=Identity]; +"1332 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference" [id=1332, type=Sub]; +"1333 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference__411" [id=1333, type=Mul]; +"1334 bert/encoder/layer_7/output/LayerNorm/moments/variance" [id=1334, type=ReduceMean]; +"1335 bert/encoder/layer_7/output/LayerNorm/batchnorm/add" [id=1335, type=Add]; +"1336 bert/encoder/layer_7/output/LayerNorm/batchnorm/Rsqrt" [id=1336, type=Sqrt]; +"1337 bert/encoder/layer_7/output/LayerNorm/batchnorm/Rsqrt__413" [id=1337, type=Reciprocal]; +"1338 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul" [id=1338, type=Mul]; +"1339 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_2" [id=1339, type=Mul]; +"1340 bert/encoder/layer_7/output/LayerNorm/batchnorm/sub" [id=1340, type=Sub]; +"1341 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_1" [id=1341, type=Mul]; +"1342 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" [id=1342, type=Add]; +"1343 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_1" [id=1343, label="1343 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1344 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_1" [id=1344, label="1344 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1345 QuantizeLinear_bert/encoder/layer_8/attention/self/value/kernel^0_1" [id=1345, label="1345 QuantizeLinear_bert/encoder/layer_8/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"1346 DequantizeLinear_bert/encoder/layer_8/attention/self/value/kernel^0_1" [id=1346, label="1346 DequantizeLinear_bert/encoder/layer_8/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"1347 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_2" [id=1347, label="1347 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"1348 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_2" [id=1348, label="1348 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"1349 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_3" [id=1349, label="1349 
QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"1350 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_3" [id=1350, label="1350 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"1351 bert/encoder/layer_8/attention/self/value/MatMul" [id=1351, type=MatMul]; +"1352 bert/encoder/layer_8/attention/self/value/BiasAdd" [id=1352, type=Add]; +"1353 bert/encoder/layer_8/attention/self/Reshape_2" [id=1353, type=Reshape]; +"1354 bert/encoder/layer_8/attention/self/transpose_2" [id=1354, type=Transpose]; +"1355 QuantizeLinear_bert/encoder/layer_8/attention/self/query/kernel^0_1" [id=1355, label="1355 QuantizeLinear_bert/encoder/layer_8/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"1356 DequantizeLinear_bert/encoder/layer_8/attention/self/query/kernel^0_1" [id=1356, label="1356 DequantizeLinear_bert/encoder/layer_8/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"1357 bert/encoder/layer_8/attention/self/query/MatMul" [id=1357, type=MatMul]; +"1358 bert/encoder/layer_8/attention/self/query/BiasAdd" [id=1358, type=Add]; +"1359 QuantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd^0_1" [id=1359, label="1359 QuantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"1360 DequantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd^0_1" [id=1360, label="1360 DequantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"1361 bert/encoder/layer_8/attention/self/Reshape" [id=1361, type=Reshape]; +"1362 bert/encoder/layer_8/attention/self/transpose" [id=1362, type=Transpose]; +"1363 QuantizeLinear_bert/encoder/layer_8/attention/self/key/kernel^0_1" [id=1363, label="1363 QuantizeLinear_bert/encoder/layer_8/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"1364 DequantizeLinear_bert/encoder/layer_8/attention/self/key/kernel^0_1" [id=1364, label="1364 DequantizeLinear_bert/encoder/layer_8/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"1365 bert/encoder/layer_8/attention/self/key/MatMul" [id=1365, type=MatMul]; +"1366 bert/encoder/layer_8/attention/self/key/BiasAdd" [id=1366, type=Add]; +"1367 QuantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd^0_1" [id=1367, label="1367 QuantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"1368 DequantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd^0_1" [id=1368, label="1368 DequantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"1369 bert/encoder/layer_8/attention/self/Reshape_1" [id=1369, type=Reshape]; +"1370 bert/encoder/layer_8/attention/self/transpose_1" [id=1370, type=Transpose]; +"1371 bert/encoder/layer_8/attention/self/MatMul__418" [id=1371, type=Transpose]; +"1372 bert/encoder/layer_8/attention/self/MatMul" [id=1372, type=MatMul]; +"1373 bert/encoder/layer_8/attention/self/Mul" [id=1373, type=Mul]; +"1374 bert/encoder/layer_8/attention/self/add" [id=1374, type=Add]; +"1375 Shape_nncf_1129" [id=1375, type=Shape]; +"1376 Flatten_nncf_1130" [id=1376, type=Flatten]; +"1377 bert/encoder/layer_8/attention/self/Softmax" [id=1377, type=Softmax]; +"1378 Reshape_nncf_1132" [id=1378, type=Reshape]; +"1379 bert/encoder/layer_8/attention/self/MatMul_1" [id=1379, type=MatMul]; +"1380 QuantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1^0_1" [id=1380, label="1380 
QuantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"1381 DequantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1^0_1" [id=1381, label="1381 DequantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"1382 bert/encoder/layer_8/attention/self/transpose_3" [id=1382, type=Transpose]; +"1383 bert/encoder/layer_8/attention/self/Reshape_3" [id=1383, type=Reshape]; +"1384 QuantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel^0_1" [id=1384, label="1384 QuantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"1385 DequantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel^0_1" [id=1385, label="1385 DequantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"1386 bert/encoder/layer_8/attention/output/dense/MatMul" [id=1386, type=MatMul]; +"1387 bert/encoder/layer_8/attention/output/dense/BiasAdd" [id=1387, type=Add]; +"1388 bert/encoder/layer_8/attention/output/add" [id=1388, type=Add]; +"1389 bert/encoder/layer_8/attention/output/LayerNorm/moments/mean" [id=1389, type=ReduceMean]; +"1390 bert/encoder/layer_8/attention/output/LayerNorm/moments/StopGradient" [id=1390, type=Identity]; +"1391 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference" [id=1391, type=Sub]; +"1392 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference__421" [id=1392, type=Mul]; +"1393 bert/encoder/layer_8/attention/output/LayerNorm/moments/variance" [id=1393, type=ReduceMean]; +"1394 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add" [id=1394, type=Add]; +"1395 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/Rsqrt" [id=1395, type=Sqrt]; +"1396 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/Rsqrt__423" [id=1396, type=Reciprocal]; +"1397 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul" [id=1397, type=Mul]; +"1398 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_2" [id=1398, type=Mul]; +"1399 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/sub" [id=1399, type=Sub]; +"1400 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_1" [id=1400, type=Mul]; +"1401 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1" [id=1401, type=Add]; +"1402 QuantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1402, label="1402 QuantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1403 DequantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1403, label="1403 DequantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1404 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel^0_1" [id=1404, label="1404 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"1405 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel^0_1" [id=1405, label="1405 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"1406 bert/encoder/layer_8/intermediate/dense/MatMul" [id=1406, type=MatMul]; +"1407 bert/encoder/layer_8/intermediate/dense/BiasAdd" [id=1407, type=Add]; +"1408 bert/encoder/layer_8/intermediate/dense/Pow" [id=1408, type=Pow]; +"1409 bert/encoder/layer_8/intermediate/dense/mul" [id=1409, type=Mul]; +"1410 bert/encoder/layer_8/intermediate/dense/add" [id=1410, type=Add]; +"1411 
bert/encoder/layer_8/intermediate/dense/mul_1" [id=1411, type=Mul]; +"1412 bert/encoder/layer_8/intermediate/dense/Tanh" [id=1412, type=Tanh]; +"1413 bert/encoder/layer_8/intermediate/dense/add_1" [id=1413, type=Add]; +"1414 bert/encoder/layer_8/intermediate/dense/mul_2" [id=1414, type=Mul]; +"1415 bert/encoder/layer_8/intermediate/dense/mul_3" [id=1415, type=Mul]; +"1416 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3^0_1" [id=1416, label="1416 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"1417 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3^0_1" [id=1417, label="1417 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"1418 QuantizeLinear_bert/encoder/layer_8/output/dense/kernel^0_1" [id=1418, label="1418 QuantizeLinear_bert/encoder/layer_8/output/dense/kernel:0_1", type=QuantizeLinear]; +"1419 DequantizeLinear_bert/encoder/layer_8/output/dense/kernel^0_1" [id=1419, label="1419 DequantizeLinear_bert/encoder/layer_8/output/dense/kernel:0_1", type=DequantizeLinear]; +"1420 bert/encoder/layer_8/output/dense/MatMul" [id=1420, type=MatMul]; +"1421 bert/encoder/layer_8/output/dense/BiasAdd" [id=1421, type=Add]; +"1422 bert/encoder/layer_8/output/add" [id=1422, type=Add]; +"1423 bert/encoder/layer_8/output/LayerNorm/moments/mean" [id=1423, type=ReduceMean]; +"1424 bert/encoder/layer_8/output/LayerNorm/moments/StopGradient" [id=1424, type=Identity]; +"1425 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference" [id=1425, type=Sub]; +"1426 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference__425" [id=1426, type=Mul]; +"1427 bert/encoder/layer_8/output/LayerNorm/moments/variance" [id=1427, type=ReduceMean]; +"1428 bert/encoder/layer_8/output/LayerNorm/batchnorm/add" [id=1428, type=Add]; +"1429 bert/encoder/layer_8/output/LayerNorm/batchnorm/Rsqrt" [id=1429, type=Sqrt]; +"1430 bert/encoder/layer_8/output/LayerNorm/batchnorm/Rsqrt__427" [id=1430, type=Reciprocal]; +"1431 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul" [id=1431, type=Mul]; +"1432 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_2" [id=1432, type=Mul]; +"1433 bert/encoder/layer_8/output/LayerNorm/batchnorm/sub" [id=1433, type=Sub]; +"1434 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_1" [id=1434, type=Mul]; +"1435 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" [id=1435, type=Add]; +"1436 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_1" [id=1436, label="1436 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1437 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_1" [id=1437, label="1437 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1438 QuantizeLinear_bert/encoder/layer_9/attention/self/value/kernel^0_1" [id=1438, label="1438 QuantizeLinear_bert/encoder/layer_9/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"1439 DequantizeLinear_bert/encoder/layer_9/attention/self/value/kernel^0_1" [id=1439, label="1439 DequantizeLinear_bert/encoder/layer_9/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"1440 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_2" [id=1440, label="1440 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"1441 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_2" [id=1441, label="1441 
DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"1442 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_3" [id=1442, label="1442 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"1443 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_3" [id=1443, label="1443 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"1444 bert/encoder/layer_9/attention/self/value/MatMul" [id=1444, type=MatMul]; +"1445 bert/encoder/layer_9/attention/self/value/BiasAdd" [id=1445, type=Add]; +"1446 bert/encoder/layer_9/attention/self/Reshape_2" [id=1446, type=Reshape]; +"1447 bert/encoder/layer_9/attention/self/transpose_2" [id=1447, type=Transpose]; +"1448 QuantizeLinear_bert/encoder/layer_9/attention/self/query/kernel^0_1" [id=1448, label="1448 QuantizeLinear_bert/encoder/layer_9/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"1449 DequantizeLinear_bert/encoder/layer_9/attention/self/query/kernel^0_1" [id=1449, label="1449 DequantizeLinear_bert/encoder/layer_9/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"1450 bert/encoder/layer_9/attention/self/query/MatMul" [id=1450, type=MatMul]; +"1451 bert/encoder/layer_9/attention/self/query/BiasAdd" [id=1451, type=Add]; +"1452 QuantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd^0_1" [id=1452, label="1452 QuantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"1453 DequantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd^0_1" [id=1453, label="1453 DequantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"1454 bert/encoder/layer_9/attention/self/Reshape" [id=1454, type=Reshape]; +"1455 bert/encoder/layer_9/attention/self/transpose" [id=1455, type=Transpose]; +"1456 QuantizeLinear_bert/encoder/layer_9/attention/self/key/kernel^0_1" [id=1456, label="1456 QuantizeLinear_bert/encoder/layer_9/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"1457 DequantizeLinear_bert/encoder/layer_9/attention/self/key/kernel^0_1" [id=1457, label="1457 DequantizeLinear_bert/encoder/layer_9/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"1458 bert/encoder/layer_9/attention/self/key/MatMul" [id=1458, type=MatMul]; +"1459 bert/encoder/layer_9/attention/self/key/BiasAdd" [id=1459, type=Add]; +"1460 QuantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd^0_1" [id=1460, label="1460 QuantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"1461 DequantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd^0_1" [id=1461, label="1461 DequantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"1462 bert/encoder/layer_9/attention/self/Reshape_1" [id=1462, type=Reshape]; +"1463 bert/encoder/layer_9/attention/self/transpose_1" [id=1463, type=Transpose]; +"1464 bert/encoder/layer_9/attention/self/MatMul__432" [id=1464, type=Transpose]; +"1465 bert/encoder/layer_9/attention/self/MatMul" [id=1465, type=MatMul]; +"1466 bert/encoder/layer_9/attention/self/Mul" [id=1466, type=Mul]; +"1467 bert/encoder/layer_9/attention/self/add" [id=1467, type=Add]; +"1468 Shape_nncf_1194" [id=1468, type=Shape]; +"1469 Flatten_nncf_1195" [id=1469, type=Flatten]; +"1470 bert/encoder/layer_9/attention/self/Softmax" [id=1470, type=Softmax]; +"1471 Reshape_nncf_1197" [id=1471, type=Reshape]; +"1472 
bert/encoder/layer_9/attention/self/MatMul_1" [id=1472, type=MatMul]; +"1473 QuantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1^0_1" [id=1473, label="1473 QuantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"1474 DequantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1^0_1" [id=1474, label="1474 DequantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"1475 bert/encoder/layer_9/attention/self/transpose_3" [id=1475, type=Transpose]; +"1476 bert/encoder/layer_9/attention/self/Reshape_3" [id=1476, type=Reshape]; +"1477 QuantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel^0_1" [id=1477, label="1477 QuantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"1478 DequantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel^0_1" [id=1478, label="1478 DequantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"1479 bert/encoder/layer_9/attention/output/dense/MatMul" [id=1479, type=MatMul]; +"1480 bert/encoder/layer_9/attention/output/dense/BiasAdd" [id=1480, type=Add]; +"1481 bert/encoder/layer_9/attention/output/add" [id=1481, type=Add]; +"1482 bert/encoder/layer_9/attention/output/LayerNorm/moments/mean" [id=1482, type=ReduceMean]; +"1483 bert/encoder/layer_9/attention/output/LayerNorm/moments/StopGradient" [id=1483, type=Identity]; +"1484 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference" [id=1484, type=Sub]; +"1485 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference__435" [id=1485, type=Mul]; +"1486 bert/encoder/layer_9/attention/output/LayerNorm/moments/variance" [id=1486, type=ReduceMean]; +"1487 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add" [id=1487, type=Add]; +"1488 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/Rsqrt" [id=1488, type=Sqrt]; +"1489 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/Rsqrt__437" [id=1489, type=Reciprocal]; +"1490 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul" [id=1490, type=Mul]; +"1491 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_2" [id=1491, type=Mul]; +"1492 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/sub" [id=1492, type=Sub]; +"1493 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_1" [id=1493, type=Mul]; +"1494 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1" [id=1494, type=Add]; +"1495 QuantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1495, label="1495 QuantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1496 DequantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1496, label="1496 DequantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1497 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel^0_1" [id=1497, label="1497 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"1498 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel^0_1" [id=1498, label="1498 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"1499 bert/encoder/layer_9/intermediate/dense/MatMul" [id=1499, type=MatMul]; +"1500 bert/encoder/layer_9/intermediate/dense/BiasAdd" [id=1500, type=Add]; +"1501 bert/encoder/layer_9/intermediate/dense/Pow" [id=1501, type=Pow]; +"1502 
bert/encoder/layer_9/intermediate/dense/mul" [id=1502, type=Mul]; +"1503 bert/encoder/layer_9/intermediate/dense/add" [id=1503, type=Add]; +"1504 bert/encoder/layer_9/intermediate/dense/mul_1" [id=1504, type=Mul]; +"1505 bert/encoder/layer_9/intermediate/dense/Tanh" [id=1505, type=Tanh]; +"1506 bert/encoder/layer_9/intermediate/dense/add_1" [id=1506, type=Add]; +"1507 bert/encoder/layer_9/intermediate/dense/mul_2" [id=1507, type=Mul]; +"1508 bert/encoder/layer_9/intermediate/dense/mul_3" [id=1508, type=Mul]; +"1509 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3^0_1" [id=1509, label="1509 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"1510 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3^0_1" [id=1510, label="1510 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"1511 QuantizeLinear_bert/encoder/layer_9/output/dense/kernel^0_1" [id=1511, label="1511 QuantizeLinear_bert/encoder/layer_9/output/dense/kernel:0_1", type=QuantizeLinear]; +"1512 DequantizeLinear_bert/encoder/layer_9/output/dense/kernel^0_1" [id=1512, label="1512 DequantizeLinear_bert/encoder/layer_9/output/dense/kernel:0_1", type=DequantizeLinear]; +"1513 bert/encoder/layer_9/output/dense/MatMul" [id=1513, type=MatMul]; +"1514 bert/encoder/layer_9/output/dense/BiasAdd" [id=1514, type=Add]; +"1515 bert/encoder/layer_9/output/add" [id=1515, type=Add]; +"1516 bert/encoder/layer_9/output/LayerNorm/moments/mean" [id=1516, type=ReduceMean]; +"1517 bert/encoder/layer_9/output/LayerNorm/moments/StopGradient" [id=1517, type=Identity]; +"1518 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference" [id=1518, type=Sub]; +"1519 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference__439" [id=1519, type=Mul]; +"1520 bert/encoder/layer_9/output/LayerNorm/moments/variance" [id=1520, type=ReduceMean]; +"1521 bert/encoder/layer_9/output/LayerNorm/batchnorm/add" [id=1521, type=Add]; +"1522 bert/encoder/layer_9/output/LayerNorm/batchnorm/Rsqrt" [id=1522, type=Sqrt]; +"1523 bert/encoder/layer_9/output/LayerNorm/batchnorm/Rsqrt__441" [id=1523, type=Reciprocal]; +"1524 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul" [id=1524, type=Mul]; +"1525 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_2" [id=1525, type=Mul]; +"1526 bert/encoder/layer_9/output/LayerNorm/batchnorm/sub" [id=1526, type=Sub]; +"1527 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_1" [id=1527, type=Mul]; +"1528 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" [id=1528, type=Add]; +"1529 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_1" [id=1529, label="1529 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1530 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_1" [id=1530, label="1530 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1531 QuantizeLinear_bert/encoder/layer_10/attention/self/value/kernel^0_1" [id=1531, label="1531 QuantizeLinear_bert/encoder/layer_10/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"1532 DequantizeLinear_bert/encoder/layer_10/attention/self/value/kernel^0_1" [id=1532, label="1532 DequantizeLinear_bert/encoder/layer_10/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"1533 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_2" [id=1533, label="1533 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1:0_2", 
type=QuantizeLinear]; +"1534 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_2" [id=1534, label="1534 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"1535 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_3" [id=1535, label="1535 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"1536 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_3" [id=1536, label="1536 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"1537 bert/encoder/layer_10/attention/self/value/MatMul" [id=1537, type=MatMul]; +"1538 bert/encoder/layer_10/attention/self/value/BiasAdd" [id=1538, type=Add]; +"1539 bert/encoder/layer_10/attention/self/Reshape_2" [id=1539, type=Reshape]; +"1540 bert/encoder/layer_10/attention/self/transpose_2" [id=1540, type=Transpose]; +"1541 QuantizeLinear_bert/encoder/layer_10/attention/self/query/kernel^0_1" [id=1541, label="1541 QuantizeLinear_bert/encoder/layer_10/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"1542 DequantizeLinear_bert/encoder/layer_10/attention/self/query/kernel^0_1" [id=1542, label="1542 DequantizeLinear_bert/encoder/layer_10/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"1543 bert/encoder/layer_10/attention/self/query/MatMul" [id=1543, type=MatMul]; +"1544 bert/encoder/layer_10/attention/self/query/BiasAdd" [id=1544, type=Add]; +"1545 QuantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd^0_1" [id=1545, label="1545 QuantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"1546 DequantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd^0_1" [id=1546, label="1546 DequantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"1547 bert/encoder/layer_10/attention/self/Reshape" [id=1547, type=Reshape]; +"1548 bert/encoder/layer_10/attention/self/transpose" [id=1548, type=Transpose]; +"1549 QuantizeLinear_bert/encoder/layer_10/attention/self/key/kernel^0_1" [id=1549, label="1549 QuantizeLinear_bert/encoder/layer_10/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"1550 DequantizeLinear_bert/encoder/layer_10/attention/self/key/kernel^0_1" [id=1550, label="1550 DequantizeLinear_bert/encoder/layer_10/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"1551 bert/encoder/layer_10/attention/self/key/MatMul" [id=1551, type=MatMul]; +"1552 bert/encoder/layer_10/attention/self/key/BiasAdd" [id=1552, type=Add]; +"1553 QuantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd^0_1" [id=1553, label="1553 QuantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"1554 DequantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd^0_1" [id=1554, label="1554 DequantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"1555 bert/encoder/layer_10/attention/self/Reshape_1" [id=1555, type=Reshape]; +"1556 bert/encoder/layer_10/attention/self/transpose_1" [id=1556, type=Transpose]; +"1557 bert/encoder/layer_10/attention/self/MatMul__446" [id=1557, type=Transpose]; +"1558 bert/encoder/layer_10/attention/self/MatMul" [id=1558, type=MatMul]; +"1559 bert/encoder/layer_10/attention/self/Mul" [id=1559, type=Mul]; +"1560 bert/encoder/layer_10/attention/self/add" [id=1560, type=Add]; +"1561 Shape_nncf_1259" [id=1561, type=Shape]; +"1562 Flatten_nncf_1260" [id=1562, type=Flatten]; 
+"1563 bert/encoder/layer_10/attention/self/Softmax" [id=1563, type=Softmax]; +"1564 Reshape_nncf_1262" [id=1564, type=Reshape]; +"1565 bert/encoder/layer_10/attention/self/MatMul_1" [id=1565, type=MatMul]; +"1566 QuantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1^0_1" [id=1566, label="1566 QuantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"1567 DequantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1^0_1" [id=1567, label="1567 DequantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"1568 bert/encoder/layer_10/attention/self/transpose_3" [id=1568, type=Transpose]; +"1569 bert/encoder/layer_10/attention/self/Reshape_3" [id=1569, type=Reshape]; +"1570 QuantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel^0_1" [id=1570, label="1570 QuantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"1571 DequantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel^0_1" [id=1571, label="1571 DequantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"1572 bert/encoder/layer_10/attention/output/dense/MatMul" [id=1572, type=MatMul]; +"1573 bert/encoder/layer_10/attention/output/dense/BiasAdd" [id=1573, type=Add]; +"1574 bert/encoder/layer_10/attention/output/add" [id=1574, type=Add]; +"1575 bert/encoder/layer_10/attention/output/LayerNorm/moments/mean" [id=1575, type=ReduceMean]; +"1576 bert/encoder/layer_10/attention/output/LayerNorm/moments/StopGradient" [id=1576, type=Identity]; +"1577 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference" [id=1577, type=Sub]; +"1578 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference__449" [id=1578, type=Mul]; +"1579 bert/encoder/layer_10/attention/output/LayerNorm/moments/variance" [id=1579, type=ReduceMean]; +"1580 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add" [id=1580, type=Add]; +"1581 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/Rsqrt" [id=1581, type=Sqrt]; +"1582 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/Rsqrt__451" [id=1582, type=Reciprocal]; +"1583 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul" [id=1583, type=Mul]; +"1584 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_2" [id=1584, type=Mul]; +"1585 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/sub" [id=1585, type=Sub]; +"1586 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_1" [id=1586, type=Mul]; +"1587 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1" [id=1587, type=Add]; +"1588 QuantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1588, label="1588 QuantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1589 DequantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1589, label="1589 DequantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1590 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel^0_1" [id=1590, label="1590 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"1591 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel^0_1" [id=1591, label="1591 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"1592 bert/encoder/layer_10/intermediate/dense/MatMul" [id=1592, 
type=MatMul]; +"1593 bert/encoder/layer_10/intermediate/dense/BiasAdd" [id=1593, type=Add]; +"1594 bert/encoder/layer_10/intermediate/dense/Pow" [id=1594, type=Pow]; +"1595 bert/encoder/layer_10/intermediate/dense/mul" [id=1595, type=Mul]; +"1596 bert/encoder/layer_10/intermediate/dense/add" [id=1596, type=Add]; +"1597 bert/encoder/layer_10/intermediate/dense/mul_1" [id=1597, type=Mul]; +"1598 bert/encoder/layer_10/intermediate/dense/Tanh" [id=1598, type=Tanh]; +"1599 bert/encoder/layer_10/intermediate/dense/add_1" [id=1599, type=Add]; +"1600 bert/encoder/layer_10/intermediate/dense/mul_2" [id=1600, type=Mul]; +"1601 bert/encoder/layer_10/intermediate/dense/mul_3" [id=1601, type=Mul]; +"1602 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3^0_1" [id=1602, label="1602 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"1603 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3^0_1" [id=1603, label="1603 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"1604 QuantizeLinear_bert/encoder/layer_10/output/dense/kernel^0_1" [id=1604, label="1604 QuantizeLinear_bert/encoder/layer_10/output/dense/kernel:0_1", type=QuantizeLinear]; +"1605 DequantizeLinear_bert/encoder/layer_10/output/dense/kernel^0_1" [id=1605, label="1605 DequantizeLinear_bert/encoder/layer_10/output/dense/kernel:0_1", type=DequantizeLinear]; +"1606 bert/encoder/layer_10/output/dense/MatMul" [id=1606, type=MatMul]; +"1607 bert/encoder/layer_10/output/dense/BiasAdd" [id=1607, type=Add]; +"1608 bert/encoder/layer_10/output/add" [id=1608, type=Add]; +"1609 bert/encoder/layer_10/output/LayerNorm/moments/mean" [id=1609, type=ReduceMean]; +"1610 bert/encoder/layer_10/output/LayerNorm/moments/StopGradient" [id=1610, type=Identity]; +"1611 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference" [id=1611, type=Sub]; +"1612 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference__453" [id=1612, type=Mul]; +"1613 bert/encoder/layer_10/output/LayerNorm/moments/variance" [id=1613, type=ReduceMean]; +"1614 bert/encoder/layer_10/output/LayerNorm/batchnorm/add" [id=1614, type=Add]; +"1615 bert/encoder/layer_10/output/LayerNorm/batchnorm/Rsqrt" [id=1615, type=Sqrt]; +"1616 bert/encoder/layer_10/output/LayerNorm/batchnorm/Rsqrt__455" [id=1616, type=Reciprocal]; +"1617 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul" [id=1617, type=Mul]; +"1618 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_2" [id=1618, type=Mul]; +"1619 bert/encoder/layer_10/output/LayerNorm/batchnorm/sub" [id=1619, type=Sub]; +"1620 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_1" [id=1620, type=Mul]; +"1621 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" [id=1621, type=Add]; +"1622 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_1" [id=1622, label="1622 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1623 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_1" [id=1623, label="1623 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1624 QuantizeLinear_bert/encoder/layer_11/attention/self/value/kernel^0_1" [id=1624, label="1624 QuantizeLinear_bert/encoder/layer_11/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"1625 DequantizeLinear_bert/encoder/layer_11/attention/self/value/kernel^0_1" [id=1625, label="1625 
DequantizeLinear_bert/encoder/layer_11/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"1626 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_2" [id=1626, label="1626 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"1627 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_2" [id=1627, label="1627 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"1628 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_3" [id=1628, label="1628 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"1629 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_3" [id=1629, label="1629 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"1630 bert/encoder/layer_11/attention/self/value/MatMul" [id=1630, type=MatMul]; +"1631 bert/encoder/layer_11/attention/self/value/BiasAdd" [id=1631, type=Add]; +"1632 bert/encoder/layer_11/attention/self/Reshape_2" [id=1632, type=Reshape]; +"1633 bert/encoder/layer_11/attention/self/transpose_2" [id=1633, type=Transpose]; +"1634 QuantizeLinear_bert/encoder/layer_11/attention/self/query/kernel^0_1" [id=1634, label="1634 QuantizeLinear_bert/encoder/layer_11/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"1635 DequantizeLinear_bert/encoder/layer_11/attention/self/query/kernel^0_1" [id=1635, label="1635 DequantizeLinear_bert/encoder/layer_11/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"1636 bert/encoder/layer_11/attention/self/query/MatMul" [id=1636, type=MatMul]; +"1637 bert/encoder/layer_11/attention/self/query/BiasAdd" [id=1637, type=Add]; +"1638 QuantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd^0_1" [id=1638, label="1638 QuantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"1639 DequantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd^0_1" [id=1639, label="1639 DequantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"1640 bert/encoder/layer_11/attention/self/Reshape" [id=1640, type=Reshape]; +"1641 bert/encoder/layer_11/attention/self/transpose" [id=1641, type=Transpose]; +"1642 QuantizeLinear_bert/encoder/layer_11/attention/self/key/kernel^0_1" [id=1642, label="1642 QuantizeLinear_bert/encoder/layer_11/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"1643 DequantizeLinear_bert/encoder/layer_11/attention/self/key/kernel^0_1" [id=1643, label="1643 DequantizeLinear_bert/encoder/layer_11/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"1644 bert/encoder/layer_11/attention/self/key/MatMul" [id=1644, type=MatMul]; +"1645 bert/encoder/layer_11/attention/self/key/BiasAdd" [id=1645, type=Add]; +"1646 QuantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd^0_1" [id=1646, label="1646 QuantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"1647 DequantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd^0_1" [id=1647, label="1647 DequantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"1648 bert/encoder/layer_11/attention/self/Reshape_1" [id=1648, type=Reshape]; +"1649 bert/encoder/layer_11/attention/self/transpose_1" [id=1649, type=Transpose]; +"1650 bert/encoder/layer_11/attention/self/MatMul__460" [id=1650, type=Transpose]; +"1651 
bert/encoder/layer_11/attention/self/MatMul" [id=1651, type=MatMul]; +"1652 bert/encoder/layer_11/attention/self/Mul" [id=1652, type=Mul]; +"1653 bert/encoder/layer_11/attention/self/add" [id=1653, type=Add]; +"1654 Shape_nncf_1324" [id=1654, type=Shape]; +"1655 Flatten_nncf_1325" [id=1655, type=Flatten]; +"1656 bert/encoder/layer_11/attention/self/Softmax" [id=1656, type=Softmax]; +"1657 Reshape_nncf_1327" [id=1657, type=Reshape]; +"1658 bert/encoder/layer_11/attention/self/MatMul_1" [id=1658, type=MatMul]; +"1659 QuantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1^0_1" [id=1659, label="1659 QuantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"1660 DequantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1^0_1" [id=1660, label="1660 DequantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"1661 bert/encoder/layer_11/attention/self/transpose_3" [id=1661, type=Transpose]; +"1662 bert/encoder/layer_11/attention/self/Reshape_3" [id=1662, type=Reshape]; +"1663 QuantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel^0_1" [id=1663, label="1663 QuantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"1664 DequantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel^0_1" [id=1664, label="1664 DequantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"1665 bert/encoder/layer_11/attention/output/dense/MatMul" [id=1665, type=MatMul]; +"1666 bert/encoder/layer_11/attention/output/dense/BiasAdd" [id=1666, type=Add]; +"1667 bert/encoder/layer_11/attention/output/add" [id=1667, type=Add]; +"1668 bert/encoder/layer_11/attention/output/LayerNorm/moments/mean" [id=1668, type=ReduceMean]; +"1669 bert/encoder/layer_11/attention/output/LayerNorm/moments/StopGradient" [id=1669, type=Identity]; +"1670 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference" [id=1670, type=Sub]; +"1671 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference__463" [id=1671, type=Mul]; +"1672 bert/encoder/layer_11/attention/output/LayerNorm/moments/variance" [id=1672, type=ReduceMean]; +"1673 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add" [id=1673, type=Add]; +"1674 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/Rsqrt" [id=1674, type=Sqrt]; +"1675 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/Rsqrt__465" [id=1675, type=Reciprocal]; +"1676 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul" [id=1676, type=Mul]; +"1677 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_2" [id=1677, type=Mul]; +"1678 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/sub" [id=1678, type=Sub]; +"1679 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_1" [id=1679, type=Mul]; +"1680 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1" [id=1680, type=Add]; +"1681 QuantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1681, label="1681 QuantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1682 DequantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1682, label="1682 DequantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1683 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel^0_1" [id=1683, label="1683 
QuantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"1684 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel^0_1" [id=1684, label="1684 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"1685 bert/encoder/layer_11/intermediate/dense/MatMul" [id=1685, type=MatMul]; +"1686 bert/encoder/layer_11/intermediate/dense/BiasAdd" [id=1686, type=Add]; +"1687 bert/encoder/layer_11/intermediate/dense/Pow" [id=1687, type=Pow]; +"1688 bert/encoder/layer_11/intermediate/dense/mul" [id=1688, type=Mul]; +"1689 bert/encoder/layer_11/intermediate/dense/add" [id=1689, type=Add]; +"1690 bert/encoder/layer_11/intermediate/dense/mul_1" [id=1690, type=Mul]; +"1691 bert/encoder/layer_11/intermediate/dense/Tanh" [id=1691, type=Tanh]; +"1692 bert/encoder/layer_11/intermediate/dense/add_1" [id=1692, type=Add]; +"1693 bert/encoder/layer_11/intermediate/dense/mul_2" [id=1693, type=Mul]; +"1694 bert/encoder/layer_11/intermediate/dense/mul_3" [id=1694, type=Mul]; +"1695 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3^0_1" [id=1695, label="1695 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"1696 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3^0_1" [id=1696, label="1696 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"1697 QuantizeLinear_bert/encoder/layer_11/output/dense/kernel^0_1" [id=1697, label="1697 QuantizeLinear_bert/encoder/layer_11/output/dense/kernel:0_1", type=QuantizeLinear]; +"1698 DequantizeLinear_bert/encoder/layer_11/output/dense/kernel^0_1" [id=1698, label="1698 DequantizeLinear_bert/encoder/layer_11/output/dense/kernel:0_1", type=DequantizeLinear]; +"1699 bert/encoder/layer_11/output/dense/MatMul" [id=1699, type=MatMul]; +"1700 bert/encoder/layer_11/output/dense/BiasAdd" [id=1700, type=Add]; +"1701 bert/encoder/layer_11/output/add" [id=1701, type=Add]; +"1702 bert/encoder/layer_11/output/LayerNorm/moments/mean" [id=1702, type=ReduceMean]; +"1703 bert/encoder/layer_11/output/LayerNorm/moments/StopGradient" [id=1703, type=Identity]; +"1704 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference" [id=1704, type=Sub]; +"1705 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference__467" [id=1705, type=Mul]; +"1706 bert/encoder/layer_11/output/LayerNorm/moments/variance" [id=1706, type=ReduceMean]; +"1707 bert/encoder/layer_11/output/LayerNorm/batchnorm/add" [id=1707, type=Add]; +"1708 bert/encoder/layer_11/output/LayerNorm/batchnorm/Rsqrt" [id=1708, type=Sqrt]; +"1709 bert/encoder/layer_11/output/LayerNorm/batchnorm/Rsqrt__469" [id=1709, type=Reciprocal]; +"1710 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul" [id=1710, type=Mul]; +"1711 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_2" [id=1711, type=Mul]; +"1712 bert/encoder/layer_11/output/LayerNorm/batchnorm/sub" [id=1712, type=Sub]; +"1713 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_1" [id=1713, type=Mul]; +"1714 bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1" [id=1714, type=Add]; +"1715 QuantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1^0_1" [id=1715, label="1715 QuantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1716 DequantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1^0_1" [id=1716, label="1716 DequantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1717 
bert/encoder/Reshape_13" [id=1717, type=Reshape]; +"1718 Shape_1" [id=1718, type=Shape]; +"1719 Shape_1__472" [id=1719, type=Cast]; +"1720 strided_slice_1" [id=1720, type=Slice]; +"1721 Constant_nncf_1377" [id=1721, type=Constant]; +"1722 strided_slice_1__476" [id=1722, type=Squeeze]; +"1723 strided_slice_1__477" [id=1723, type=Cast]; +"1724 mul" [id=1724, type=Mul]; +"1725 Constant_nncf_1381" [id=1725, type=Constant]; +"1726 Reshape/shape_Unsqueeze__482" [id=1726, type=Unsqueeze]; +"1727 Reshape/shape_Concat__484" [id=1727, type=Concat]; +"1728 Reshape__485" [id=1728, type=Cast]; +"1729 Constant_nncf_1385" [id=1729, type=Constant]; +"1730 Reshape_1/shape_Unsqueeze__478" [id=1730, type=Unsqueeze]; +"1731 Reshape_1/shape_Concat__481" [id=1731, type=Concat]; +"1732 Reshape_1__487" [id=1732, type=Cast]; +"1733 Reshape" [id=1733, type=Reshape]; +"1734 QuantizeLinear_MatMul__486^0_1" [id=1734, label="1734 QuantizeLinear_MatMul__486:0_1", type=QuantizeLinear]; +"1735 DequantizeLinear_MatMul__486^0_1" [id=1735, label="1735 DequantizeLinear_MatMul__486:0_1", type=DequantizeLinear]; +"1736 MatMul" [id=1736, type=MatMul]; +"1737 BiasAdd" [id=1737, type=Add]; +"1738 Reshape_1" [id=1738, type=Reshape]; +"1739 transpose" [id=1739, type=Transpose]; +"1740 unstack" [id=1740, type=Split]; +"1741 Constant_nncf_1395" [id=1741, type=Constant]; +"1742 unstack__490" [id=1742, type=Squeeze]; +"1743 unstack_graph_outputs_Identity__4" [id=1743, type=Identity]; +"1744 Constant_nncf_1398" [id=1744, type=Constant]; +"1745 unstack__488" [id=1745, type=Squeeze]; +"1746 unstack_graph_outputs_Identity__7" [id=1746, type=Identity]; +"1747 nncf_model_input_0" [id=1747, type=nncf_model_input]; +"1748 nncf_model_input_1" [id=1748, type=nncf_model_input]; +"1749 nncf_model_input_2" [id=1749, type=nncf_model_input]; +"1750 nncf_model_input_3" [id=1750, type=nncf_model_input]; +"1751 nncf_model_output_0" [id=1751, type=nncf_model_output]; +"1752 nncf_model_output_1" [id=1752, type=nncf_model_output]; +"1753 nncf_model_output_2" [id=1753, type=nncf_model_output]; +"0 unique_ids_graph_outputs_Identity__10" -> "1753 nncf_model_output_2" [label="[-1]", style=dashed]; +"1 Constant_nncf_1" -> "2 bert/encoder/ones/packed_Unsqueeze__20" [label="[1]", style=dashed]; +"2 bert/encoder/ones/packed_Unsqueeze__20" -> "253 bert/encoder/ones/packed_Concat__21" [label="[1]", style=dashed]; +"3 Constant_nncf_3" -> "4 bert/encoder/ones/packed_Unsqueeze__19" [label="[1]", style=dashed]; +"4 bert/encoder/ones/packed_Unsqueeze__19" -> "253 bert/encoder/ones/packed_Concat__21" [label="[1]", style=dashed]; +"5 Constant_nncf_5" -> "6 bert/encoder/layer_9/attention/self/Reshape_3/shape_Unsqueeze__83" [label="[1]", style=dashed]; +"6 bert/encoder/layer_9/attention/self/Reshape_3/shape_Unsqueeze__83" -> "393 bert/encoder/layer_9/attention/self/Reshape_3/shape_Concat__84" [label="[1]", style=dashed]; +"7 Constant_nncf_7" -> "8 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__88" [label="[1]", style=dashed]; +"8 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__88" -> "397 bert/encoder/layer_9/attention/self/Reshape_2/shape_Concat__89" [label="[1]", style=dashed]; +"9 Constant_nncf_9" -> "10 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__87" [label="[1]", style=dashed]; +"10 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__87" -> "397 bert/encoder/layer_9/attention/self/Reshape_2/shape_Concat__89" [label="[1]", style=dashed]; +"11 Constant_nncf_11" -> "12 
bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__86" [label="[1]", style=dashed]; +"12 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__86" -> "397 bert/encoder/layer_9/attention/self/Reshape_2/shape_Concat__89" [label="[1]", style=dashed]; +"13 Constant_nncf_13" -> "14 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__93" [label="[1]", style=dashed]; +"14 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__93" -> "401 bert/encoder/layer_9/attention/self/Reshape_1/shape_Concat__94" [label="[1]", style=dashed]; +"15 Constant_nncf_15" -> "16 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__92" [label="[1]", style=dashed]; +"16 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__92" -> "401 bert/encoder/layer_9/attention/self/Reshape_1/shape_Concat__94" [label="[1]", style=dashed]; +"17 Constant_nncf_17" -> "18 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__91" [label="[1]", style=dashed]; +"18 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__91" -> "401 bert/encoder/layer_9/attention/self/Reshape_1/shape_Concat__94" [label="[1]", style=dashed]; +"19 Constant_nncf_19" -> "20 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__98" [label="[1]", style=dashed]; +"20 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__98" -> "405 bert/encoder/layer_9/attention/self/Reshape/shape_Concat__99" [label="[1]", style=dashed]; +"21 Constant_nncf_21" -> "22 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__97" [label="[1]", style=dashed]; +"22 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__97" -> "405 bert/encoder/layer_9/attention/self/Reshape/shape_Concat__99" [label="[1]", style=dashed]; +"23 Constant_nncf_23" -> "24 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__96" [label="[1]", style=dashed]; +"24 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__96" -> "405 bert/encoder/layer_9/attention/self/Reshape/shape_Concat__99" [label="[1]", style=dashed]; +"25 Constant_nncf_25" -> "26 bert/encoder/layer_8/attention/self/Reshape_3/shape_Unsqueeze__101" [label="[1]", style=dashed]; +"26 bert/encoder/layer_8/attention/self/Reshape_3/shape_Unsqueeze__101" -> "410 bert/encoder/layer_8/attention/self/Reshape_3/shape_Concat__102" [label="[1]", style=dashed]; +"27 Constant_nncf_27" -> "28 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__106" [label="[1]", style=dashed]; +"28 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__106" -> "414 bert/encoder/layer_8/attention/self/Reshape_2/shape_Concat__107" [label="[1]", style=dashed]; +"29 Constant_nncf_29" -> "30 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__105" [label="[1]", style=dashed]; +"30 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__105" -> "414 bert/encoder/layer_8/attention/self/Reshape_2/shape_Concat__107" [label="[1]", style=dashed]; +"31 Constant_nncf_31" -> "32 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__104" [label="[1]", style=dashed]; +"32 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__104" -> "414 bert/encoder/layer_8/attention/self/Reshape_2/shape_Concat__107" [label="[1]", style=dashed]; +"33 Constant_nncf_33" -> "34 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__111" [label="[1]", style=dashed]; +"34 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__111" -> "418 bert/encoder/layer_8/attention/self/Reshape_1/shape_Concat__112" [label="[1]", style=dashed]; 
+"35 Constant_nncf_35" -> "36 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__110" [label="[1]", style=dashed]; +"36 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__110" -> "418 bert/encoder/layer_8/attention/self/Reshape_1/shape_Concat__112" [label="[1]", style=dashed]; +"37 Constant_nncf_37" -> "38 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__109" [label="[1]", style=dashed]; +"38 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__109" -> "418 bert/encoder/layer_8/attention/self/Reshape_1/shape_Concat__112" [label="[1]", style=dashed]; +"39 Constant_nncf_39" -> "40 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__116" [label="[1]", style=dashed]; +"40 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__116" -> "422 bert/encoder/layer_8/attention/self/Reshape/shape_Concat__117" [label="[1]", style=dashed]; +"41 Constant_nncf_41" -> "42 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__115" [label="[1]", style=dashed]; +"42 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__115" -> "422 bert/encoder/layer_8/attention/self/Reshape/shape_Concat__117" [label="[1]", style=dashed]; +"43 Constant_nncf_43" -> "44 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__114" [label="[1]", style=dashed]; +"44 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__114" -> "422 bert/encoder/layer_8/attention/self/Reshape/shape_Concat__117" [label="[1]", style=dashed]; +"45 Constant_nncf_45" -> "46 bert/encoder/layer_7/attention/self/Reshape_3/shape_Unsqueeze__119" [label="[1]", style=dashed]; +"46 bert/encoder/layer_7/attention/self/Reshape_3/shape_Unsqueeze__119" -> "427 bert/encoder/layer_7/attention/self/Reshape_3/shape_Concat__120" [label="[1]", style=dashed]; +"47 Constant_nncf_47" -> "48 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__124" [label="[1]", style=dashed]; +"48 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__124" -> "431 bert/encoder/layer_7/attention/self/Reshape_2/shape_Concat__125" [label="[1]", style=dashed]; +"49 Constant_nncf_49" -> "50 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__123" [label="[1]", style=dashed]; +"50 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__123" -> "431 bert/encoder/layer_7/attention/self/Reshape_2/shape_Concat__125" [label="[1]", style=dashed]; +"51 Constant_nncf_51" -> "52 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__122" [label="[1]", style=dashed]; +"52 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__122" -> "431 bert/encoder/layer_7/attention/self/Reshape_2/shape_Concat__125" [label="[1]", style=dashed]; +"53 Constant_nncf_53" -> "54 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__129" [label="[1]", style=dashed]; +"54 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__129" -> "435 bert/encoder/layer_7/attention/self/Reshape_1/shape_Concat__130" [label="[1]", style=dashed]; +"55 Constant_nncf_55" -> "56 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__128" [label="[1]", style=dashed]; +"56 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__128" -> "435 bert/encoder/layer_7/attention/self/Reshape_1/shape_Concat__130" [label="[1]", style=dashed]; +"57 Constant_nncf_57" -> "58 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__127" [label="[1]", style=dashed]; +"58 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__127" -> "435 
bert/encoder/layer_7/attention/self/Reshape_1/shape_Concat__130" [label="[1]", style=dashed]; +"59 Constant_nncf_59" -> "60 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__134" [label="[1]", style=dashed]; +"60 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__134" -> "439 bert/encoder/layer_7/attention/self/Reshape/shape_Concat__135" [label="[1]", style=dashed]; +"61 Constant_nncf_61" -> "62 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__133" [label="[1]", style=dashed]; +"62 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__133" -> "439 bert/encoder/layer_7/attention/self/Reshape/shape_Concat__135" [label="[1]", style=dashed]; +"63 Constant_nncf_63" -> "64 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__132" [label="[1]", style=dashed]; +"64 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__132" -> "439 bert/encoder/layer_7/attention/self/Reshape/shape_Concat__135" [label="[1]", style=dashed]; +"65 Constant_nncf_65" -> "66 bert/encoder/layer_6/attention/self/Reshape_3/shape_Unsqueeze__137" [label="[1]", style=dashed]; +"66 bert/encoder/layer_6/attention/self/Reshape_3/shape_Unsqueeze__137" -> "444 bert/encoder/layer_6/attention/self/Reshape_3/shape_Concat__138" [label="[1]", style=dashed]; +"67 Constant_nncf_67" -> "68 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__142" [label="[1]", style=dashed]; +"68 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__142" -> "448 bert/encoder/layer_6/attention/self/Reshape_2/shape_Concat__143" [label="[1]", style=dashed]; +"69 Constant_nncf_69" -> "70 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__141" [label="[1]", style=dashed]; +"70 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__141" -> "448 bert/encoder/layer_6/attention/self/Reshape_2/shape_Concat__143" [label="[1]", style=dashed]; +"71 Constant_nncf_71" -> "72 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__140" [label="[1]", style=dashed]; +"72 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__140" -> "448 bert/encoder/layer_6/attention/self/Reshape_2/shape_Concat__143" [label="[1]", style=dashed]; +"73 Constant_nncf_73" -> "74 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__147" [label="[1]", style=dashed]; +"74 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__147" -> "452 bert/encoder/layer_6/attention/self/Reshape_1/shape_Concat__148" [label="[1]", style=dashed]; +"75 Constant_nncf_75" -> "76 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__146" [label="[1]", style=dashed]; +"76 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__146" -> "452 bert/encoder/layer_6/attention/self/Reshape_1/shape_Concat__148" [label="[1]", style=dashed]; +"77 Constant_nncf_77" -> "78 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__145" [label="[1]", style=dashed]; +"78 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__145" -> "452 bert/encoder/layer_6/attention/self/Reshape_1/shape_Concat__148" [label="[1]", style=dashed]; +"79 Constant_nncf_79" -> "80 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__152" [label="[1]", style=dashed]; +"80 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__152" -> "456 bert/encoder/layer_6/attention/self/Reshape/shape_Concat__153" [label="[1]", style=dashed]; +"81 Constant_nncf_81" -> "82 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__151" [label="[1]", style=dashed]; +"82 
bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__151" -> "456 bert/encoder/layer_6/attention/self/Reshape/shape_Concat__153" [label="[1]", style=dashed]; +"83 Constant_nncf_83" -> "84 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__150" [label="[1]", style=dashed]; +"84 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__150" -> "456 bert/encoder/layer_6/attention/self/Reshape/shape_Concat__153" [label="[1]", style=dashed]; +"85 Constant_nncf_85" -> "86 bert/encoder/layer_5/attention/self/Reshape_3/shape_Unsqueeze__155" [label="[1]", style=dashed]; +"86 bert/encoder/layer_5/attention/self/Reshape_3/shape_Unsqueeze__155" -> "461 bert/encoder/layer_5/attention/self/Reshape_3/shape_Concat__156" [label="[1]", style=dashed]; +"87 Constant_nncf_87" -> "88 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__160" [label="[1]", style=dashed]; +"88 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__160" -> "465 bert/encoder/layer_5/attention/self/Reshape_2/shape_Concat__161" [label="[1]", style=dashed]; +"89 Constant_nncf_89" -> "90 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__159" [label="[1]", style=dashed]; +"90 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__159" -> "465 bert/encoder/layer_5/attention/self/Reshape_2/shape_Concat__161" [label="[1]", style=dashed]; +"91 Constant_nncf_91" -> "92 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__158" [label="[1]", style=dashed]; +"92 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__158" -> "465 bert/encoder/layer_5/attention/self/Reshape_2/shape_Concat__161" [label="[1]", style=dashed]; +"93 Constant_nncf_93" -> "94 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__165" [label="[1]", style=dashed]; +"94 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__165" -> "469 bert/encoder/layer_5/attention/self/Reshape_1/shape_Concat__166" [label="[1]", style=dashed]; +"95 Constant_nncf_95" -> "96 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__164" [label="[1]", style=dashed]; +"96 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__164" -> "469 bert/encoder/layer_5/attention/self/Reshape_1/shape_Concat__166" [label="[1]", style=dashed]; +"97 Constant_nncf_97" -> "98 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__163" [label="[1]", style=dashed]; +"98 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__163" -> "469 bert/encoder/layer_5/attention/self/Reshape_1/shape_Concat__166" [label="[1]", style=dashed]; +"99 Constant_nncf_99" -> "100 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__170" [label="[1]", style=dashed]; +"100 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__170" -> "473 bert/encoder/layer_5/attention/self/Reshape/shape_Concat__171" [label="[1]", style=dashed]; +"101 Constant_nncf_101" -> "102 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__169" [label="[1]", style=dashed]; +"102 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__169" -> "473 bert/encoder/layer_5/attention/self/Reshape/shape_Concat__171" [label="[1]", style=dashed]; +"103 Constant_nncf_103" -> "104 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__168" [label="[1]", style=dashed]; +"104 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__168" -> "473 bert/encoder/layer_5/attention/self/Reshape/shape_Concat__171" [label="[1]", style=dashed]; +"105 Constant_nncf_105" -> "106 
bert/encoder/layer_4/attention/self/Reshape_3/shape_Unsqueeze__173" [label="[1]", style=dashed]; +"106 bert/encoder/layer_4/attention/self/Reshape_3/shape_Unsqueeze__173" -> "478 bert/encoder/layer_4/attention/self/Reshape_3/shape_Concat__174" [label="[1]", style=dashed]; +"107 Constant_nncf_107" -> "108 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__178" [label="[1]", style=dashed]; +"108 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__178" -> "482 bert/encoder/layer_4/attention/self/Reshape_2/shape_Concat__179" [label="[1]", style=dashed]; +"109 Constant_nncf_109" -> "110 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__177" [label="[1]", style=dashed]; +"110 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__177" -> "482 bert/encoder/layer_4/attention/self/Reshape_2/shape_Concat__179" [label="[1]", style=dashed]; +"111 Constant_nncf_111" -> "112 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__176" [label="[1]", style=dashed]; +"112 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__176" -> "482 bert/encoder/layer_4/attention/self/Reshape_2/shape_Concat__179" [label="[1]", style=dashed]; +"113 Constant_nncf_113" -> "114 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__183" [label="[1]", style=dashed]; +"114 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__183" -> "486 bert/encoder/layer_4/attention/self/Reshape_1/shape_Concat__184" [label="[1]", style=dashed]; +"115 Constant_nncf_115" -> "116 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__182" [label="[1]", style=dashed]; +"116 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__182" -> "486 bert/encoder/layer_4/attention/self/Reshape_1/shape_Concat__184" [label="[1]", style=dashed]; +"117 Constant_nncf_117" -> "118 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__181" [label="[1]", style=dashed]; +"118 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__181" -> "486 bert/encoder/layer_4/attention/self/Reshape_1/shape_Concat__184" [label="[1]", style=dashed]; +"119 Constant_nncf_119" -> "120 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__188" [label="[1]", style=dashed]; +"120 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__188" -> "490 bert/encoder/layer_4/attention/self/Reshape/shape_Concat__189" [label="[1]", style=dashed]; +"121 Constant_nncf_121" -> "122 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__187" [label="[1]", style=dashed]; +"122 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__187" -> "490 bert/encoder/layer_4/attention/self/Reshape/shape_Concat__189" [label="[1]", style=dashed]; +"123 Constant_nncf_123" -> "124 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__186" [label="[1]", style=dashed]; +"124 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__186" -> "490 bert/encoder/layer_4/attention/self/Reshape/shape_Concat__189" [label="[1]", style=dashed]; +"125 Constant_nncf_125" -> "126 bert/encoder/layer_3/attention/self/Reshape_3/shape_Unsqueeze__191" [label="[1]", style=dashed]; +"126 bert/encoder/layer_3/attention/self/Reshape_3/shape_Unsqueeze__191" -> "495 bert/encoder/layer_3/attention/self/Reshape_3/shape_Concat__192" [label="[1]", style=dashed]; +"127 Constant_nncf_127" -> "128 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__196" [label="[1]", style=dashed]; +"128 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__196" -> "499 
bert/encoder/layer_3/attention/self/Reshape_2/shape_Concat__197" [label="[1]", style=dashed]; +"129 Constant_nncf_129" -> "130 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__195" [label="[1]", style=dashed]; +"130 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__195" -> "499 bert/encoder/layer_3/attention/self/Reshape_2/shape_Concat__197" [label="[1]", style=dashed]; +"131 Constant_nncf_131" -> "132 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__194" [label="[1]", style=dashed]; +"132 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__194" -> "499 bert/encoder/layer_3/attention/self/Reshape_2/shape_Concat__197" [label="[1]", style=dashed]; +"133 Constant_nncf_133" -> "134 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__201" [label="[1]", style=dashed]; +"134 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__201" -> "503 bert/encoder/layer_3/attention/self/Reshape_1/shape_Concat__202" [label="[1]", style=dashed]; +"135 Constant_nncf_135" -> "136 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__200" [label="[1]", style=dashed]; +"136 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__200" -> "503 bert/encoder/layer_3/attention/self/Reshape_1/shape_Concat__202" [label="[1]", style=dashed]; +"137 Constant_nncf_137" -> "138 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__199" [label="[1]", style=dashed]; +"138 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__199" -> "503 bert/encoder/layer_3/attention/self/Reshape_1/shape_Concat__202" [label="[1]", style=dashed]; +"139 Constant_nncf_139" -> "140 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__206" [label="[1]", style=dashed]; +"140 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__206" -> "507 bert/encoder/layer_3/attention/self/Reshape/shape_Concat__207" [label="[1]", style=dashed]; +"141 Constant_nncf_141" -> "142 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__205" [label="[1]", style=dashed]; +"142 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__205" -> "507 bert/encoder/layer_3/attention/self/Reshape/shape_Concat__207" [label="[1]", style=dashed]; +"143 Constant_nncf_143" -> "144 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__204" [label="[1]", style=dashed]; +"144 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__204" -> "507 bert/encoder/layer_3/attention/self/Reshape/shape_Concat__207" [label="[1]", style=dashed]; +"145 Constant_nncf_145" -> "146 bert/encoder/layer_2/attention/self/Reshape_3/shape_Unsqueeze__209" [label="[1]", style=dashed]; +"146 bert/encoder/layer_2/attention/self/Reshape_3/shape_Unsqueeze__209" -> "512 bert/encoder/layer_2/attention/self/Reshape_3/shape_Concat__210" [label="[1]", style=dashed]; +"147 Constant_nncf_147" -> "148 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__214" [label="[1]", style=dashed]; +"148 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__214" -> "516 bert/encoder/layer_2/attention/self/Reshape_2/shape_Concat__215" [label="[1]", style=dashed]; +"149 Constant_nncf_149" -> "150 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__213" [label="[1]", style=dashed]; +"150 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__213" -> "516 bert/encoder/layer_2/attention/self/Reshape_2/shape_Concat__215" [label="[1]", style=dashed]; +"151 Constant_nncf_151" -> "152 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__212" 
[label="[1]", style=dashed]; +"152 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__212" -> "516 bert/encoder/layer_2/attention/self/Reshape_2/shape_Concat__215" [label="[1]", style=dashed]; +"153 Constant_nncf_153" -> "154 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__219" [label="[1]", style=dashed]; +"154 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__219" -> "520 bert/encoder/layer_2/attention/self/Reshape_1/shape_Concat__220" [label="[1]", style=dashed]; +"155 Constant_nncf_155" -> "156 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__218" [label="[1]", style=dashed]; +"156 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__218" -> "520 bert/encoder/layer_2/attention/self/Reshape_1/shape_Concat__220" [label="[1]", style=dashed]; +"157 Constant_nncf_157" -> "158 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__217" [label="[1]", style=dashed]; +"158 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__217" -> "520 bert/encoder/layer_2/attention/self/Reshape_1/shape_Concat__220" [label="[1]", style=dashed]; +"159 Constant_nncf_159" -> "160 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__224" [label="[1]", style=dashed]; +"160 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__224" -> "524 bert/encoder/layer_2/attention/self/Reshape/shape_Concat__225" [label="[1]", style=dashed]; +"161 Constant_nncf_161" -> "162 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__223" [label="[1]", style=dashed]; +"162 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__223" -> "524 bert/encoder/layer_2/attention/self/Reshape/shape_Concat__225" [label="[1]", style=dashed]; +"163 Constant_nncf_163" -> "164 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__222" [label="[1]", style=dashed]; +"164 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__222" -> "524 bert/encoder/layer_2/attention/self/Reshape/shape_Concat__225" [label="[1]", style=dashed]; +"165 Constant_nncf_165" -> "166 bert/encoder/layer_11/attention/self/Reshape_3/shape_Unsqueeze__227" [label="[1]", style=dashed]; +"166 bert/encoder/layer_11/attention/self/Reshape_3/shape_Unsqueeze__227" -> "529 bert/encoder/layer_11/attention/self/Reshape_3/shape_Concat__228" [label="[1]", style=dashed]; +"167 Constant_nncf_167" -> "168 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__232" [label="[1]", style=dashed]; +"168 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__232" -> "533 bert/encoder/layer_11/attention/self/Reshape_2/shape_Concat__233" [label="[1]", style=dashed]; +"169 Constant_nncf_169" -> "170 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__231" [label="[1]", style=dashed]; +"170 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__231" -> "533 bert/encoder/layer_11/attention/self/Reshape_2/shape_Concat__233" [label="[1]", style=dashed]; +"171 Constant_nncf_171" -> "172 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__230" [label="[1]", style=dashed]; +"172 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__230" -> "533 bert/encoder/layer_11/attention/self/Reshape_2/shape_Concat__233" [label="[1]", style=dashed]; +"173 Constant_nncf_173" -> "174 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__237" [label="[1]", style=dashed]; +"174 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__237" -> "537 bert/encoder/layer_11/attention/self/Reshape_1/shape_Concat__238" [label="[1]", 
style=dashed]; +"175 Constant_nncf_175" -> "176 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__236" [label="[1]", style=dashed]; +"176 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__236" -> "537 bert/encoder/layer_11/attention/self/Reshape_1/shape_Concat__238" [label="[1]", style=dashed]; +"177 Constant_nncf_177" -> "178 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__235" [label="[1]", style=dashed]; +"178 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__235" -> "537 bert/encoder/layer_11/attention/self/Reshape_1/shape_Concat__238" [label="[1]", style=dashed]; +"179 Constant_nncf_179" -> "180 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__242" [label="[1]", style=dashed]; +"180 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__242" -> "541 bert/encoder/layer_11/attention/self/Reshape/shape_Concat__243" [label="[1]", style=dashed]; +"181 Constant_nncf_181" -> "182 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__241" [label="[1]", style=dashed]; +"182 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__241" -> "541 bert/encoder/layer_11/attention/self/Reshape/shape_Concat__243" [label="[1]", style=dashed]; +"183 Constant_nncf_183" -> "184 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__240" [label="[1]", style=dashed]; +"184 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__240" -> "541 bert/encoder/layer_11/attention/self/Reshape/shape_Concat__243" [label="[1]", style=dashed]; +"185 Constant_nncf_185" -> "186 bert/encoder/layer_10/attention/self/Reshape_3/shape_Unsqueeze__245" [label="[1]", style=dashed]; +"186 bert/encoder/layer_10/attention/self/Reshape_3/shape_Unsqueeze__245" -> "546 bert/encoder/layer_10/attention/self/Reshape_3/shape_Concat__246" [label="[1]", style=dashed]; +"187 Constant_nncf_187" -> "188 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__250" [label="[1]", style=dashed]; +"188 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__250" -> "550 bert/encoder/layer_10/attention/self/Reshape_2/shape_Concat__251" [label="[1]", style=dashed]; +"189 Constant_nncf_189" -> "190 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__249" [label="[1]", style=dashed]; +"190 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__249" -> "550 bert/encoder/layer_10/attention/self/Reshape_2/shape_Concat__251" [label="[1]", style=dashed]; +"191 Constant_nncf_191" -> "192 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__248" [label="[1]", style=dashed]; +"192 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__248" -> "550 bert/encoder/layer_10/attention/self/Reshape_2/shape_Concat__251" [label="[1]", style=dashed]; +"193 Constant_nncf_193" -> "194 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__255" [label="[1]", style=dashed]; +"194 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__255" -> "554 bert/encoder/layer_10/attention/self/Reshape_1/shape_Concat__256" [label="[1]", style=dashed]; +"195 Constant_nncf_195" -> "196 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__254" [label="[1]", style=dashed]; +"196 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__254" -> "554 bert/encoder/layer_10/attention/self/Reshape_1/shape_Concat__256" [label="[1]", style=dashed]; +"197 Constant_nncf_197" -> "198 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__253" [label="[1]", style=dashed]; +"198 
bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__253" -> "554 bert/encoder/layer_10/attention/self/Reshape_1/shape_Concat__256" [label="[1]", style=dashed]; +"199 Constant_nncf_199" -> "200 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__260" [label="[1]", style=dashed]; +"200 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__260" -> "558 bert/encoder/layer_10/attention/self/Reshape/shape_Concat__261" [label="[1]", style=dashed]; +"201 Constant_nncf_201" -> "202 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__259" [label="[1]", style=dashed]; +"202 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__259" -> "558 bert/encoder/layer_10/attention/self/Reshape/shape_Concat__261" [label="[1]", style=dashed]; +"203 Constant_nncf_203" -> "204 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__258" [label="[1]", style=dashed]; +"204 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__258" -> "558 bert/encoder/layer_10/attention/self/Reshape/shape_Concat__261" [label="[1]", style=dashed]; +"205 Constant_nncf_205" -> "206 bert/encoder/layer_1/attention/self/Reshape_3/shape_Unsqueeze__263" [label="[1]", style=dashed]; +"206 bert/encoder/layer_1/attention/self/Reshape_3/shape_Unsqueeze__263" -> "563 bert/encoder/layer_1/attention/self/Reshape_3/shape_Concat__264" [label="[1]", style=dashed]; +"207 Constant_nncf_207" -> "208 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__268" [label="[1]", style=dashed]; +"208 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__268" -> "567 bert/encoder/layer_1/attention/self/Reshape_2/shape_Concat__269" [label="[1]", style=dashed]; +"209 Constant_nncf_209" -> "210 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__267" [label="[1]", style=dashed]; +"210 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__267" -> "567 bert/encoder/layer_1/attention/self/Reshape_2/shape_Concat__269" [label="[1]", style=dashed]; +"211 Constant_nncf_211" -> "212 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__266" [label="[1]", style=dashed]; +"212 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__266" -> "567 bert/encoder/layer_1/attention/self/Reshape_2/shape_Concat__269" [label="[1]", style=dashed]; +"213 Constant_nncf_213" -> "214 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__273" [label="[1]", style=dashed]; +"214 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__273" -> "571 bert/encoder/layer_1/attention/self/Reshape_1/shape_Concat__274" [label="[1]", style=dashed]; +"215 Constant_nncf_215" -> "216 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__272" [label="[1]", style=dashed]; +"216 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__272" -> "571 bert/encoder/layer_1/attention/self/Reshape_1/shape_Concat__274" [label="[1]", style=dashed]; +"217 Constant_nncf_217" -> "218 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__271" [label="[1]", style=dashed]; +"218 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__271" -> "571 bert/encoder/layer_1/attention/self/Reshape_1/shape_Concat__274" [label="[1]", style=dashed]; +"219 Constant_nncf_219" -> "220 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__278" [label="[1]", style=dashed]; +"220 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__278" -> "575 bert/encoder/layer_1/attention/self/Reshape/shape_Concat__279" [label="[1]", style=dashed]; +"221 Constant_nncf_221" -> 
"222 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__277" [label="[1]", style=dashed]; +"222 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__277" -> "575 bert/encoder/layer_1/attention/self/Reshape/shape_Concat__279" [label="[1]", style=dashed]; +"223 Constant_nncf_223" -> "224 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__276" [label="[1]", style=dashed]; +"224 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__276" -> "575 bert/encoder/layer_1/attention/self/Reshape/shape_Concat__279" [label="[1]", style=dashed]; +"225 Constant_nncf_225" -> "226 bert/encoder/layer_0/attention/self/Reshape_3/shape_Unsqueeze__281" [label="[1]", style=dashed]; +"226 bert/encoder/layer_0/attention/self/Reshape_3/shape_Unsqueeze__281" -> "580 bert/encoder/layer_0/attention/self/Reshape_3/shape_Concat__282" [label="[1]", style=dashed]; +"227 Constant_nncf_227" -> "228 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__286" [label="[1]", style=dashed]; +"228 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__286" -> "584 bert/encoder/layer_0/attention/self/Reshape_2/shape_Concat__287" [label="[1]", style=dashed]; +"229 Constant_nncf_229" -> "230 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__285" [label="[1]", style=dashed]; +"230 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__285" -> "584 bert/encoder/layer_0/attention/self/Reshape_2/shape_Concat__287" [label="[1]", style=dashed]; +"231 Constant_nncf_231" -> "232 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__284" [label="[1]", style=dashed]; +"232 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__284" -> "584 bert/encoder/layer_0/attention/self/Reshape_2/shape_Concat__287" [label="[1]", style=dashed]; +"233 Constant_nncf_233" -> "234 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__291" [label="[1]", style=dashed]; +"234 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__291" -> "588 bert/encoder/layer_0/attention/self/Reshape_1/shape_Concat__292" [label="[1]", style=dashed]; +"235 Constant_nncf_235" -> "236 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__290" [label="[1]", style=dashed]; +"236 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__290" -> "588 bert/encoder/layer_0/attention/self/Reshape_1/shape_Concat__292" [label="[1]", style=dashed]; +"237 Constant_nncf_237" -> "238 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__289" [label="[1]", style=dashed]; +"238 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__289" -> "588 bert/encoder/layer_0/attention/self/Reshape_1/shape_Concat__292" [label="[1]", style=dashed]; +"239 Constant_nncf_239" -> "240 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__296" [label="[1]", style=dashed]; +"240 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__296" -> "592 bert/encoder/layer_0/attention/self/Reshape/shape_Concat__297" [label="[1]", style=dashed]; +"241 Constant_nncf_241" -> "242 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__295" [label="[1]", style=dashed]; +"242 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__295" -> "592 bert/encoder/layer_0/attention/self/Reshape/shape_Concat__297" [label="[1]", style=dashed]; +"243 Constant_nncf_243" -> "244 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__294" [label="[1]", style=dashed]; +"244 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__294" -> "592 
bert/encoder/layer_0/attention/self/Reshape/shape_Concat__297" [label="[1]", style=dashed]; +"245 bert/encoder/Shape" -> "246 bert/encoder/Shape__12" [label="[2]", style=dashed]; +"246 bert/encoder/Shape__12" -> "247 bert/encoder/strided_slice" [label="[2]", style=solid]; +"247 bert/encoder/strided_slice" -> "249 bert/encoder/strided_slice__16" [label="[1]", style=solid]; +"248 Constant_nncf_248" -> "249 bert/encoder/strided_slice__16" [label="[1]", style=dashed]; +"249 bert/encoder/strided_slice__16" -> "250 bert/encoder/strided_slice__17" [label="[]", style=solid]; +"250 bert/encoder/strided_slice__17" -> "252 bert/encoder/ones/packed_Unsqueeze__18" [label="[]", style=dashed]; +"250 bert/encoder/strided_slice__17" -> "262 bert/encoder/Reshape/shape_Unsqueeze__23" [label="[]", style=dashed]; +"251 Constant_nncf_251" -> "252 bert/encoder/ones/packed_Unsqueeze__18" [label="[1]", style=dashed]; +"252 bert/encoder/ones/packed_Unsqueeze__18" -> "253 bert/encoder/ones/packed_Concat__21" [label="[1]", style=dashed]; +"253 bert/encoder/ones/packed_Concat__21" -> "254 bert/encoder/ones__22" [label="[3]", style=dashed]; +"254 bert/encoder/ones__22" -> "255 bert/encoder/ones" [label="[3]", style=dashed]; +"255 bert/encoder/ones" -> "271 bert/encoder/mul" [label="[-1, -1, -1]", style=solid]; +"256 Constant_nncf_256" -> "257 bert/encoder/Reshape_13/shape_Unsqueeze__300" [label="[1]", style=dashed]; +"257 bert/encoder/Reshape_13/shape_Unsqueeze__300" -> "596 bert/encoder/Reshape_13/shape_Concat__301" [label="[1]", style=dashed]; +"258 Constant_nncf_258" -> "259 bert/encoder/Reshape_13/shape_Unsqueeze__299" [label="[1]", style=dashed]; +"259 bert/encoder/Reshape_13/shape_Unsqueeze__299" -> "596 bert/encoder/Reshape_13/shape_Concat__301" [label="[1]", style=dashed]; +"260 bert/encoder/Reshape_1__302" -> "598 bert/encoder/Reshape_1" [label="[2]", style=dashed]; +"261 Constant_nncf_261" -> "262 bert/encoder/Reshape/shape_Unsqueeze__23" [label="[1]", style=dashed]; +"262 bert/encoder/Reshape/shape_Unsqueeze__23" -> "267 bert/encoder/Reshape/shape_Concat__26" [label="[1]", style=dashed]; +"263 Constant_nncf_263" -> "264 bert/encoder/Reshape/shape_Unsqueeze__25" [label="[1]", style=dashed]; +"264 bert/encoder/Reshape/shape_Unsqueeze__25" -> "267 bert/encoder/Reshape/shape_Concat__26" [label="[1]", style=dashed]; +"265 Constant_nncf_265" -> "266 bert/encoder/Reshape/shape_Unsqueeze__24" [label="[1]", style=dashed]; +"266 bert/encoder/Reshape/shape_Unsqueeze__24" -> "267 bert/encoder/Reshape/shape_Concat__26" [label="[1]", style=dashed]; +"267 bert/encoder/Reshape/shape_Concat__26" -> "268 bert/encoder/Reshape__27" [label="[3]", style=dashed]; +"268 bert/encoder/Reshape__27" -> "269 bert/encoder/Reshape" [label="[3]", style=dashed]; +"269 bert/encoder/Reshape" -> "270 bert/encoder/Cast" [label="[]", style=dashed]; +"270 bert/encoder/Cast" -> "271 bert/encoder/mul" [label="[]", style=solid]; +"271 bert/encoder/mul" -> "272 bert/encoder/layer_9/attention/self/ExpandDims" [label="[]", style=solid]; +"271 bert/encoder/mul" -> "275 bert/encoder/layer_8/attention/self/ExpandDims" [label="[]", style=solid]; +"271 bert/encoder/mul" -> "278 bert/encoder/layer_7/attention/self/ExpandDims" [label="[]", style=solid]; +"271 bert/encoder/mul" -> "281 bert/encoder/layer_6/attention/self/ExpandDims" [label="[]", style=solid]; +"271 bert/encoder/mul" -> "284 bert/encoder/layer_5/attention/self/ExpandDims" [label="[]", style=solid]; +"271 bert/encoder/mul" -> "287 bert/encoder/layer_4/attention/self/ExpandDims" [label="[]", 
style=solid]; +"271 bert/encoder/mul" -> "290 bert/encoder/layer_3/attention/self/ExpandDims" [label="[]", style=solid]; +"271 bert/encoder/mul" -> "293 bert/encoder/layer_2/attention/self/ExpandDims" [label="[]", style=solid]; +"271 bert/encoder/mul" -> "296 bert/encoder/layer_11/attention/self/ExpandDims" [label="[]", style=solid]; +"271 bert/encoder/mul" -> "299 bert/encoder/layer_10/attention/self/ExpandDims" [label="[]", style=solid]; +"271 bert/encoder/mul" -> "302 bert/encoder/layer_1/attention/self/ExpandDims" [label="[]", style=solid]; +"271 bert/encoder/mul" -> "305 bert/encoder/layer_0/attention/self/ExpandDims" [label="[]", style=solid]; +"272 bert/encoder/layer_9/attention/self/ExpandDims" -> "273 bert/encoder/layer_9/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"273 bert/encoder/layer_9/attention/self/sub" -> "274 bert/encoder/layer_9/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"274 bert/encoder/layer_9/attention/self/mul_1" -> "1467 bert/encoder/layer_9/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"275 bert/encoder/layer_8/attention/self/ExpandDims" -> "276 bert/encoder/layer_8/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"276 bert/encoder/layer_8/attention/self/sub" -> "277 bert/encoder/layer_8/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"277 bert/encoder/layer_8/attention/self/mul_1" -> "1374 bert/encoder/layer_8/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"278 bert/encoder/layer_7/attention/self/ExpandDims" -> "279 bert/encoder/layer_7/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"279 bert/encoder/layer_7/attention/self/sub" -> "280 bert/encoder/layer_7/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"280 bert/encoder/layer_7/attention/self/mul_1" -> "1281 bert/encoder/layer_7/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"281 bert/encoder/layer_6/attention/self/ExpandDims" -> "282 bert/encoder/layer_6/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"282 bert/encoder/layer_6/attention/self/sub" -> "283 bert/encoder/layer_6/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"283 bert/encoder/layer_6/attention/self/mul_1" -> "1188 bert/encoder/layer_6/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"284 bert/encoder/layer_5/attention/self/ExpandDims" -> "285 bert/encoder/layer_5/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"285 bert/encoder/layer_5/attention/self/sub" -> "286 bert/encoder/layer_5/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"286 bert/encoder/layer_5/attention/self/mul_1" -> "1095 bert/encoder/layer_5/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"287 bert/encoder/layer_4/attention/self/ExpandDims" -> "288 bert/encoder/layer_4/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"288 bert/encoder/layer_4/attention/self/sub" -> "289 bert/encoder/layer_4/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"289 bert/encoder/layer_4/attention/self/mul_1" -> "1002 bert/encoder/layer_4/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"290 bert/encoder/layer_3/attention/self/ExpandDims" -> "291 bert/encoder/layer_3/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"291 bert/encoder/layer_3/attention/self/sub" -> "292 bert/encoder/layer_3/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"292 bert/encoder/layer_3/attention/self/mul_1" -> 
"909 bert/encoder/layer_3/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"293 bert/encoder/layer_2/attention/self/ExpandDims" -> "294 bert/encoder/layer_2/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"294 bert/encoder/layer_2/attention/self/sub" -> "295 bert/encoder/layer_2/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"295 bert/encoder/layer_2/attention/self/mul_1" -> "816 bert/encoder/layer_2/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"296 bert/encoder/layer_11/attention/self/ExpandDims" -> "297 bert/encoder/layer_11/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"297 bert/encoder/layer_11/attention/self/sub" -> "298 bert/encoder/layer_11/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"298 bert/encoder/layer_11/attention/self/mul_1" -> "1653 bert/encoder/layer_11/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"299 bert/encoder/layer_10/attention/self/ExpandDims" -> "300 bert/encoder/layer_10/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"300 bert/encoder/layer_10/attention/self/sub" -> "301 bert/encoder/layer_10/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"301 bert/encoder/layer_10/attention/self/mul_1" -> "1560 bert/encoder/layer_10/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"302 bert/encoder/layer_1/attention/self/ExpandDims" -> "303 bert/encoder/layer_1/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"303 bert/encoder/layer_1/attention/self/sub" -> "304 bert/encoder/layer_1/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"304 bert/encoder/layer_1/attention/self/mul_1" -> "723 bert/encoder/layer_1/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"305 bert/encoder/layer_0/attention/self/ExpandDims" -> "306 bert/encoder/layer_0/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"306 bert/encoder/layer_0/attention/self/sub" -> "307 bert/encoder/layer_0/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"307 bert/encoder/layer_0/attention/self/mul_1" -> "630 bert/encoder/layer_0/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"308 bert/embeddings/Slice" -> "310 bert/embeddings/Reshape_4" [label="[256, 768]", style=solid]; +"309 bert/embeddings/Reshape_4__42" -> "310 bert/embeddings/Reshape_4" [label="[3]", style=dashed]; +"310 bert/embeddings/Reshape_4" -> "370 bert/embeddings/add_1" [label="[]", style=solid]; +"311 Constant_nncf_311" -> "312 bert/embeddings/Reshape_3/shape_Unsqueeze__69" [label="[1]", style=dashed]; +"312 bert/embeddings/Reshape_3/shape_Unsqueeze__69" -> "346 bert/embeddings/Reshape_3/shape_Concat__70" [label="[1]", style=dashed]; +"313 Constant_nncf_313" -> "314 bert/embeddings/Reshape_3/shape_Unsqueeze__68" [label="[1]", style=dashed]; +"314 bert/embeddings/Reshape_3/shape_Unsqueeze__68" -> "346 bert/embeddings/Reshape_3/shape_Concat__70" [label="[1]", style=dashed]; +"315 bert/embeddings/Reshape_2__43" -> "316 bert/embeddings/Reshape_2" [label="[1]", style=dashed]; +"316 bert/embeddings/Reshape_2" -> "362 bert/embeddings/one_hot" [label="[]", style=dashed]; +"317 Constant_nncf_317" -> "318 bert/embeddings/Reshape_1/shape_Unsqueeze__57" [label="[1]", style=dashed]; +"318 bert/embeddings/Reshape_1/shape_Unsqueeze__57" -> "331 bert/embeddings/Reshape_1/shape_Concat__58" [label="[1]", style=dashed]; +"319 Constant_nncf_319" -> "320 bert/embeddings/Reshape_1/shape_Unsqueeze__56" [label="[1]", style=dashed]; 
+"320 bert/embeddings/Reshape_1/shape_Unsqueeze__56" -> "331 bert/embeddings/Reshape_1/shape_Concat__58" [label="[1]", style=dashed]; +"321 bert/embeddings/Reshape__59" -> "333 bert/embeddings/Reshape" [label="[1]", style=dashed]; +"322 bert/embeddings/ExpandDims" -> "323 bert/embeddings/Shape" [label="[-1, 256, 1]", style=dashed]; +"322 bert/embeddings/ExpandDims" -> "333 bert/embeddings/Reshape" [label="[-1, 256, 1]", style=dashed]; +"323 bert/embeddings/Shape" -> "324 bert/embeddings/Shape__49" [label="[3]", style=dashed]; +"324 bert/embeddings/Shape__49" -> "325 bert/embeddings/strided_slice" [label="[3]", style=solid]; +"325 bert/embeddings/strided_slice" -> "327 bert/embeddings/strided_slice__53" [label="[1]", style=solid]; +"326 Constant_nncf_326" -> "327 bert/embeddings/strided_slice__53" [label="[1]", style=dashed]; +"327 bert/embeddings/strided_slice__53" -> "328 bert/embeddings/strided_slice__54" [label="[]", style=solid]; +"328 bert/embeddings/strided_slice__54" -> "330 bert/embeddings/Reshape_1/shape_Unsqueeze__55" [label="[]", style=dashed]; +"329 Constant_nncf_329" -> "330 bert/embeddings/Reshape_1/shape_Unsqueeze__55" [label="[1]", style=dashed]; +"330 bert/embeddings/Reshape_1/shape_Unsqueeze__55" -> "331 bert/embeddings/Reshape_1/shape_Concat__58" [label="[1]", style=dashed]; +"331 bert/embeddings/Reshape_1/shape_Concat__58" -> "332 bert/embeddings/Reshape_1__60" [label="[3]", style=dashed]; +"332 bert/embeddings/Reshape_1__60" -> "337 bert/embeddings/Reshape_1" [label="[3]", style=dashed]; +"333 bert/embeddings/Reshape" -> "336 bert/embeddings/GatherV2" [label="[]", style=dashed]; +"334 QuantizeLinear_bert/embeddings/word_embeddings^0_1" -> "335 DequantizeLinear_bert/embeddings/word_embeddings^0_1" [label="[30522, 768]", style=dashed]; +"335 DequantizeLinear_bert/embeddings/word_embeddings^0_1" -> "336 bert/embeddings/GatherV2" [label="[30522, 768]", style=solid]; +"336 bert/embeddings/GatherV2" -> "337 bert/embeddings/Reshape_1" [label="[]", style=solid]; +"337 bert/embeddings/Reshape_1" -> "338 bert/embeddings/Shape_1" [label="[]", style=solid]; +"337 bert/embeddings/Reshape_1" -> "369 bert/embeddings/add" [label="[]", style=solid]; +"338 bert/embeddings/Shape_1" -> "339 bert/embeddings/Shape_1__61" [label="[-1]", style=dashed]; +"339 bert/embeddings/Shape_1__61" -> "340 bert/embeddings/strided_slice_1" [label="[-1]", style=solid]; +"340 bert/embeddings/strided_slice_1" -> "342 bert/embeddings/strided_slice_1__65" [label="[-1]", style=solid]; +"341 Constant_nncf_339" -> "342 bert/embeddings/strided_slice_1__65" [label="[1]", style=dashed]; +"342 bert/embeddings/strided_slice_1__65" -> "343 bert/embeddings/strided_slice_1__66" [label="[]", style=solid]; +"343 bert/embeddings/strided_slice_1__66" -> "345 bert/embeddings/Reshape_3/shape_Unsqueeze__67" [label="[]", style=dashed]; +"344 Constant_nncf_342" -> "345 bert/embeddings/Reshape_3/shape_Unsqueeze__67" [label="[1]", style=dashed]; +"345 bert/embeddings/Reshape_3/shape_Unsqueeze__67" -> "346 bert/embeddings/Reshape_3/shape_Concat__70" [label="[1]", style=dashed]; +"346 bert/embeddings/Reshape_3/shape_Concat__70" -> "347 bert/embeddings/Reshape_3__71" [label="[3]", style=dashed]; +"347 bert/embeddings/Reshape_3__71" -> "368 bert/embeddings/Reshape_3" [label="[3]", style=dashed]; +"348 Constant_nncf_346" -> "349 Unsqueeze__46" [label="[1]", style=dashed]; +"349 Unsqueeze__46" -> "361 Concat__47" [label="[1]", style=solid]; +"350 Constant_nncf_348" -> "351 Unsqueeze__45" [label="[1]", style=dashed]; +"351 Unsqueeze__45" 
-> "361 Concat__47" [label="[1]", style=solid]; +"352 Constant_nncf_350" -> "353 Unsqueeze__44" [label="[1]", style=dashed]; +"353 Unsqueeze__44" -> "362 bert/embeddings/one_hot" [label="[1]", style=dashed]; +"354 Constant_nncf_352" -> "355 Reshape_1/shape_Unsqueeze__480" [label="[1]", style=dashed]; +"355 Reshape_1/shape_Unsqueeze__480" -> "1731 Reshape_1/shape_Concat__481" [label="[1]", style=dashed]; +"356 Constant_nncf_354" -> "357 Reshape_1/shape_Unsqueeze__479" [label="[1]", style=dashed]; +"357 Reshape_1/shape_Unsqueeze__479" -> "1731 Reshape_1/shape_Concat__481" [label="[1]", style=dashed]; +"358 Constant_nncf_356" -> "359 Reshape/shape_Unsqueeze__483" [label="[1]", style=dashed]; +"359 Reshape/shape_Unsqueeze__483" -> "1727 Reshape/shape_Concat__484" [label="[1]", style=dashed]; +"360 MatMul__486" -> "1734 QuantizeLinear_MatMul__486^0_1" [label="[768, 2]", style=solid]; +"361 Concat__47" -> "362 bert/embeddings/one_hot" [label="[2]", style=solid]; +"362 bert/embeddings/one_hot" -> "363 QuantizeLinear_bert/embeddings/one_hot^0_1" [label="[]", style=solid]; +"363 QuantizeLinear_bert/embeddings/one_hot^0_1" -> "364 DequantizeLinear_bert/embeddings/one_hot^0_1" [label="[]", style=dashed]; +"364 DequantizeLinear_bert/embeddings/one_hot^0_1" -> "367 bert/embeddings/MatMul" [label="[]", style=solid]; +"365 QuantizeLinear_bert/embeddings/token_type_embeddings^0_1" -> "366 DequantizeLinear_bert/embeddings/token_type_embeddings^0_1" [label="[2, 768]", style=dashed]; +"366 DequantizeLinear_bert/embeddings/token_type_embeddings^0_1" -> "367 bert/embeddings/MatMul" [label="[2, 768]", style=solid]; +"367 bert/embeddings/MatMul" -> "368 bert/embeddings/Reshape_3" [label="[]", style=solid]; +"368 bert/embeddings/Reshape_3" -> "369 bert/embeddings/add" [label="[]", style=solid]; +"369 bert/embeddings/add" -> "370 bert/embeddings/add_1" [label="[]", style=solid]; +"370 bert/embeddings/add_1" -> "371 bert/embeddings/LayerNorm/moments/mean" [label="[]", style=solid]; +"370 bert/embeddings/add_1" -> "373 bert/embeddings/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"370 bert/embeddings/add_1" -> "382 bert/embeddings/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"371 bert/embeddings/LayerNorm/moments/mean" -> "372 bert/embeddings/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"371 bert/embeddings/LayerNorm/moments/mean" -> "380 bert/embeddings/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"372 bert/embeddings/LayerNorm/moments/StopGradient" -> "373 bert/embeddings/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"373 bert/embeddings/LayerNorm/moments/SquaredDifference" -> "374 bert/embeddings/LayerNorm/moments/SquaredDifference__72" [label="[]", style=solid]; +"374 bert/embeddings/LayerNorm/moments/SquaredDifference__72" -> "375 bert/embeddings/LayerNorm/moments/variance" [label="[]", style=solid]; +"375 bert/embeddings/LayerNorm/moments/variance" -> "376 bert/embeddings/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"376 bert/embeddings/LayerNorm/batchnorm/add" -> "377 bert/embeddings/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"377 bert/embeddings/LayerNorm/batchnorm/Rsqrt" -> "378 bert/embeddings/LayerNorm/batchnorm/Rsqrt__74" [label="[]", style=solid]; +"378 bert/embeddings/LayerNorm/batchnorm/Rsqrt__74" -> "379 bert/embeddings/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"379 bert/embeddings/LayerNorm/batchnorm/mul" -> "380 bert/embeddings/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"379 
bert/embeddings/LayerNorm/batchnorm/mul" -> "382 bert/embeddings/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"380 bert/embeddings/LayerNorm/batchnorm/mul_2" -> "381 bert/embeddings/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"381 bert/embeddings/LayerNorm/batchnorm/sub" -> "383 bert/embeddings/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"382 bert/embeddings/LayerNorm/batchnorm/mul_1" -> "383 bert/embeddings/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"383 bert/embeddings/LayerNorm/batchnorm/add_1" -> "384 bert/encoder/Shape_2" [label="[]", style=solid]; +"383 bert/embeddings/LayerNorm/batchnorm/add_1" -> "598 bert/encoder/Reshape_1" [label="[]", style=solid]; +"384 bert/encoder/Shape_2" -> "385 bert/encoder/Shape_2__76" [label="[-1]", style=dashed]; +"385 bert/encoder/Shape_2__76" -> "386 bert/encoder/strided_slice_2" [label="[-1]", style=solid]; +"386 bert/encoder/strided_slice_2" -> "388 bert/encoder/strided_slice_2__80" [label="[-1]", style=solid]; +"387 Constant_nncf_381" -> "388 bert/encoder/strided_slice_2__80" [label="[1]", style=dashed]; +"388 bert/encoder/strided_slice_2__80" -> "389 bert/encoder/strided_slice_2__81" [label="[]", style=solid]; +"389 bert/encoder/strided_slice_2__81" -> "390 bert/encoder/layer_9/attention/self/mul_2" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "396 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__85" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "400 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__90" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "404 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__95" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "407 bert/encoder/layer_8/attention/self/mul_2" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "413 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__103" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "417 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__108" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "421 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__113" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "424 bert/encoder/layer_7/attention/self/mul_2" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "430 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__121" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "434 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__126" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "438 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__131" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "441 bert/encoder/layer_6/attention/self/mul_2" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "447 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__139" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "451 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__144" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "455 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__149" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "458 bert/encoder/layer_5/attention/self/mul_2" [label="[]", style=dashed]; +"389 
bert/encoder/strided_slice_2__81" -> "464 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__157" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "468 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__162" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "472 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__167" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "475 bert/encoder/layer_4/attention/self/mul_2" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "481 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__175" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "485 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__180" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "489 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__185" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "492 bert/encoder/layer_3/attention/self/mul_2" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "498 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__193" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "502 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__198" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "506 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__203" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "509 bert/encoder/layer_2/attention/self/mul_2" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "515 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__211" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "519 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__216" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "523 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__221" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "526 bert/encoder/layer_11/attention/self/mul_2" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "532 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__229" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "536 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__234" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "540 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__239" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "543 bert/encoder/layer_10/attention/self/mul_2" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "549 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__247" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "553 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__252" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "557 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__257" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "560 bert/encoder/layer_1/attention/self/mul_2" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "566 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__265" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "570 
bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__270" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "574 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__275" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "577 bert/encoder/layer_0/attention/self/mul_2" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "583 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__283" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "587 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__288" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "591 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__293" [label="[]", style=dashed]; +"389 bert/encoder/strided_slice_2__81" -> "595 bert/encoder/Reshape_13/shape_Unsqueeze__298" [label="[]", style=dashed]; +"390 bert/encoder/layer_9/attention/self/mul_2" -> "392 bert/encoder/layer_9/attention/self/Reshape_3/shape_Unsqueeze__82" [label="[]", style=dashed]; +"391 Constant_nncf_385" -> "392 bert/encoder/layer_9/attention/self/Reshape_3/shape_Unsqueeze__82" [label="[1]", style=dashed]; +"392 bert/encoder/layer_9/attention/self/Reshape_3/shape_Unsqueeze__82" -> "393 bert/encoder/layer_9/attention/self/Reshape_3/shape_Concat__84" [label="[1]", style=dashed]; +"393 bert/encoder/layer_9/attention/self/Reshape_3/shape_Concat__84" -> "394 bert/encoder/layer_9/attention/self/Reshape_3__434" [label="[2]", style=dashed]; +"394 bert/encoder/layer_9/attention/self/Reshape_3__434" -> "1476 bert/encoder/layer_9/attention/self/Reshape_3" [label="[2]", style=dashed]; +"395 Constant_nncf_389" -> "396 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__85" [label="[1]", style=dashed]; +"396 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__85" -> "397 bert/encoder/layer_9/attention/self/Reshape_2/shape_Concat__89" [label="[1]", style=dashed]; +"397 bert/encoder/layer_9/attention/self/Reshape_2/shape_Concat__89" -> "398 bert/encoder/layer_9/attention/self/Reshape_2__429" [label="[4]", style=dashed]; +"398 bert/encoder/layer_9/attention/self/Reshape_2__429" -> "1446 bert/encoder/layer_9/attention/self/Reshape_2" [label="[4]", style=dashed]; +"399 Constant_nncf_393" -> "400 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__90" [label="[1]", style=dashed]; +"400 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__90" -> "401 bert/encoder/layer_9/attention/self/Reshape_1/shape_Concat__94" [label="[1]", style=dashed]; +"401 bert/encoder/layer_9/attention/self/Reshape_1/shape_Concat__94" -> "402 bert/encoder/layer_9/attention/self/Reshape_1__431" [label="[4]", style=dashed]; +"402 bert/encoder/layer_9/attention/self/Reshape_1__431" -> "1462 bert/encoder/layer_9/attention/self/Reshape_1" [label="[4]", style=dashed]; +"403 Constant_nncf_397" -> "404 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__95" [label="[1]", style=dashed]; +"404 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__95" -> "405 bert/encoder/layer_9/attention/self/Reshape/shape_Concat__99" [label="[1]", style=dashed]; +"405 bert/encoder/layer_9/attention/self/Reshape/shape_Concat__99" -> "406 bert/encoder/layer_9/attention/self/Reshape__430" [label="[4]", style=dashed]; +"406 bert/encoder/layer_9/attention/self/Reshape__430" -> "1454 bert/encoder/layer_9/attention/self/Reshape" [label="[4]", style=dashed]; +"407 bert/encoder/layer_8/attention/self/mul_2" -> "409 
bert/encoder/layer_8/attention/self/Reshape_3/shape_Unsqueeze__100" [label="[]", style=dashed]; +"408 Constant_nncf_402" -> "409 bert/encoder/layer_8/attention/self/Reshape_3/shape_Unsqueeze__100" [label="[1]", style=dashed]; +"409 bert/encoder/layer_8/attention/self/Reshape_3/shape_Unsqueeze__100" -> "410 bert/encoder/layer_8/attention/self/Reshape_3/shape_Concat__102" [label="[1]", style=dashed]; +"410 bert/encoder/layer_8/attention/self/Reshape_3/shape_Concat__102" -> "411 bert/encoder/layer_8/attention/self/Reshape_3__420" [label="[2]", style=dashed]; +"411 bert/encoder/layer_8/attention/self/Reshape_3__420" -> "1383 bert/encoder/layer_8/attention/self/Reshape_3" [label="[2]", style=dashed]; +"412 Constant_nncf_406" -> "413 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__103" [label="[1]", style=dashed]; +"413 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__103" -> "414 bert/encoder/layer_8/attention/self/Reshape_2/shape_Concat__107" [label="[1]", style=dashed]; +"414 bert/encoder/layer_8/attention/self/Reshape_2/shape_Concat__107" -> "415 bert/encoder/layer_8/attention/self/Reshape_2__415" [label="[4]", style=dashed]; +"415 bert/encoder/layer_8/attention/self/Reshape_2__415" -> "1353 bert/encoder/layer_8/attention/self/Reshape_2" [label="[4]", style=dashed]; +"416 Constant_nncf_410" -> "417 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__108" [label="[1]", style=dashed]; +"417 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__108" -> "418 bert/encoder/layer_8/attention/self/Reshape_1/shape_Concat__112" [label="[1]", style=dashed]; +"418 bert/encoder/layer_8/attention/self/Reshape_1/shape_Concat__112" -> "419 bert/encoder/layer_8/attention/self/Reshape_1__417" [label="[4]", style=dashed]; +"419 bert/encoder/layer_8/attention/self/Reshape_1__417" -> "1369 bert/encoder/layer_8/attention/self/Reshape_1" [label="[4]", style=dashed]; +"420 Constant_nncf_414" -> "421 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__113" [label="[1]", style=dashed]; +"421 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__113" -> "422 bert/encoder/layer_8/attention/self/Reshape/shape_Concat__117" [label="[1]", style=dashed]; +"422 bert/encoder/layer_8/attention/self/Reshape/shape_Concat__117" -> "423 bert/encoder/layer_8/attention/self/Reshape__416" [label="[4]", style=dashed]; +"423 bert/encoder/layer_8/attention/self/Reshape__416" -> "1361 bert/encoder/layer_8/attention/self/Reshape" [label="[4]", style=dashed]; +"424 bert/encoder/layer_7/attention/self/mul_2" -> "426 bert/encoder/layer_7/attention/self/Reshape_3/shape_Unsqueeze__118" [label="[]", style=dashed]; +"425 Constant_nncf_419" -> "426 bert/encoder/layer_7/attention/self/Reshape_3/shape_Unsqueeze__118" [label="[1]", style=dashed]; +"426 bert/encoder/layer_7/attention/self/Reshape_3/shape_Unsqueeze__118" -> "427 bert/encoder/layer_7/attention/self/Reshape_3/shape_Concat__120" [label="[1]", style=dashed]; +"427 bert/encoder/layer_7/attention/self/Reshape_3/shape_Concat__120" -> "428 bert/encoder/layer_7/attention/self/Reshape_3__406" [label="[2]", style=dashed]; +"428 bert/encoder/layer_7/attention/self/Reshape_3__406" -> "1290 bert/encoder/layer_7/attention/self/Reshape_3" [label="[2]", style=dashed]; +"429 Constant_nncf_423" -> "430 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__121" [label="[1]", style=dashed]; +"430 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__121" -> "431 
bert/encoder/layer_7/attention/self/Reshape_2/shape_Concat__125" [label="[1]", style=dashed]; +"431 bert/encoder/layer_7/attention/self/Reshape_2/shape_Concat__125" -> "432 bert/encoder/layer_7/attention/self/Reshape_2__401" [label="[4]", style=dashed]; +"432 bert/encoder/layer_7/attention/self/Reshape_2__401" -> "1260 bert/encoder/layer_7/attention/self/Reshape_2" [label="[4]", style=dashed]; +"433 Constant_nncf_427" -> "434 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__126" [label="[1]", style=dashed]; +"434 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__126" -> "435 bert/encoder/layer_7/attention/self/Reshape_1/shape_Concat__130" [label="[1]", style=dashed]; +"435 bert/encoder/layer_7/attention/self/Reshape_1/shape_Concat__130" -> "436 bert/encoder/layer_7/attention/self/Reshape_1__403" [label="[4]", style=dashed]; +"436 bert/encoder/layer_7/attention/self/Reshape_1__403" -> "1276 bert/encoder/layer_7/attention/self/Reshape_1" [label="[4]", style=dashed]; +"437 Constant_nncf_431" -> "438 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__131" [label="[1]", style=dashed]; +"438 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__131" -> "439 bert/encoder/layer_7/attention/self/Reshape/shape_Concat__135" [label="[1]", style=dashed]; +"439 bert/encoder/layer_7/attention/self/Reshape/shape_Concat__135" -> "440 bert/encoder/layer_7/attention/self/Reshape__402" [label="[4]", style=dashed]; +"440 bert/encoder/layer_7/attention/self/Reshape__402" -> "1268 bert/encoder/layer_7/attention/self/Reshape" [label="[4]", style=dashed]; +"441 bert/encoder/layer_6/attention/self/mul_2" -> "443 bert/encoder/layer_6/attention/self/Reshape_3/shape_Unsqueeze__136" [label="[]", style=dashed]; +"442 Constant_nncf_436" -> "443 bert/encoder/layer_6/attention/self/Reshape_3/shape_Unsqueeze__136" [label="[1]", style=dashed]; +"443 bert/encoder/layer_6/attention/self/Reshape_3/shape_Unsqueeze__136" -> "444 bert/encoder/layer_6/attention/self/Reshape_3/shape_Concat__138" [label="[1]", style=dashed]; +"444 bert/encoder/layer_6/attention/self/Reshape_3/shape_Concat__138" -> "445 bert/encoder/layer_6/attention/self/Reshape_3__392" [label="[2]", style=dashed]; +"445 bert/encoder/layer_6/attention/self/Reshape_3__392" -> "1197 bert/encoder/layer_6/attention/self/Reshape_3" [label="[2]", style=dashed]; +"446 Constant_nncf_440" -> "447 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__139" [label="[1]", style=dashed]; +"447 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__139" -> "448 bert/encoder/layer_6/attention/self/Reshape_2/shape_Concat__143" [label="[1]", style=dashed]; +"448 bert/encoder/layer_6/attention/self/Reshape_2/shape_Concat__143" -> "449 bert/encoder/layer_6/attention/self/Reshape_2__387" [label="[4]", style=dashed]; +"449 bert/encoder/layer_6/attention/self/Reshape_2__387" -> "1167 bert/encoder/layer_6/attention/self/Reshape_2" [label="[4]", style=dashed]; +"450 Constant_nncf_444" -> "451 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__144" [label="[1]", style=dashed]; +"451 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__144" -> "452 bert/encoder/layer_6/attention/self/Reshape_1/shape_Concat__148" [label="[1]", style=dashed]; +"452 bert/encoder/layer_6/attention/self/Reshape_1/shape_Concat__148" -> "453 bert/encoder/layer_6/attention/self/Reshape_1__389" [label="[4]", style=dashed]; +"453 bert/encoder/layer_6/attention/self/Reshape_1__389" -> "1183 bert/encoder/layer_6/attention/self/Reshape_1" 
[label="[4]", style=dashed]; +"454 Constant_nncf_448" -> "455 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__149" [label="[1]", style=dashed]; +"455 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__149" -> "456 bert/encoder/layer_6/attention/self/Reshape/shape_Concat__153" [label="[1]", style=dashed]; +"456 bert/encoder/layer_6/attention/self/Reshape/shape_Concat__153" -> "457 bert/encoder/layer_6/attention/self/Reshape__388" [label="[4]", style=dashed]; +"457 bert/encoder/layer_6/attention/self/Reshape__388" -> "1175 bert/encoder/layer_6/attention/self/Reshape" [label="[4]", style=dashed]; +"458 bert/encoder/layer_5/attention/self/mul_2" -> "460 bert/encoder/layer_5/attention/self/Reshape_3/shape_Unsqueeze__154" [label="[]", style=dashed]; +"459 Constant_nncf_453" -> "460 bert/encoder/layer_5/attention/self/Reshape_3/shape_Unsqueeze__154" [label="[1]", style=dashed]; +"460 bert/encoder/layer_5/attention/self/Reshape_3/shape_Unsqueeze__154" -> "461 bert/encoder/layer_5/attention/self/Reshape_3/shape_Concat__156" [label="[1]", style=dashed]; +"461 bert/encoder/layer_5/attention/self/Reshape_3/shape_Concat__156" -> "462 bert/encoder/layer_5/attention/self/Reshape_3__378" [label="[2]", style=dashed]; +"462 bert/encoder/layer_5/attention/self/Reshape_3__378" -> "1104 bert/encoder/layer_5/attention/self/Reshape_3" [label="[2]", style=dashed]; +"463 Constant_nncf_457" -> "464 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__157" [label="[1]", style=dashed]; +"464 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__157" -> "465 bert/encoder/layer_5/attention/self/Reshape_2/shape_Concat__161" [label="[1]", style=dashed]; +"465 bert/encoder/layer_5/attention/self/Reshape_2/shape_Concat__161" -> "466 bert/encoder/layer_5/attention/self/Reshape_2__373" [label="[4]", style=dashed]; +"466 bert/encoder/layer_5/attention/self/Reshape_2__373" -> "1074 bert/encoder/layer_5/attention/self/Reshape_2" [label="[4]", style=dashed]; +"467 Constant_nncf_461" -> "468 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__162" [label="[1]", style=dashed]; +"468 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__162" -> "469 bert/encoder/layer_5/attention/self/Reshape_1/shape_Concat__166" [label="[1]", style=dashed]; +"469 bert/encoder/layer_5/attention/self/Reshape_1/shape_Concat__166" -> "470 bert/encoder/layer_5/attention/self/Reshape_1__375" [label="[4]", style=dashed]; +"470 bert/encoder/layer_5/attention/self/Reshape_1__375" -> "1090 bert/encoder/layer_5/attention/self/Reshape_1" [label="[4]", style=dashed]; +"471 Constant_nncf_465" -> "472 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__167" [label="[1]", style=dashed]; +"472 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__167" -> "473 bert/encoder/layer_5/attention/self/Reshape/shape_Concat__171" [label="[1]", style=dashed]; +"473 bert/encoder/layer_5/attention/self/Reshape/shape_Concat__171" -> "474 bert/encoder/layer_5/attention/self/Reshape__374" [label="[4]", style=dashed]; +"474 bert/encoder/layer_5/attention/self/Reshape__374" -> "1082 bert/encoder/layer_5/attention/self/Reshape" [label="[4]", style=dashed]; +"475 bert/encoder/layer_4/attention/self/mul_2" -> "477 bert/encoder/layer_4/attention/self/Reshape_3/shape_Unsqueeze__172" [label="[]", style=dashed]; +"476 Constant_nncf_470" -> "477 bert/encoder/layer_4/attention/self/Reshape_3/shape_Unsqueeze__172" [label="[1]", style=dashed]; +"477 
bert/encoder/layer_4/attention/self/Reshape_3/shape_Unsqueeze__172" -> "478 bert/encoder/layer_4/attention/self/Reshape_3/shape_Concat__174" [label="[1]", style=dashed]; +"478 bert/encoder/layer_4/attention/self/Reshape_3/shape_Concat__174" -> "479 bert/encoder/layer_4/attention/self/Reshape_3__364" [label="[2]", style=dashed]; +"479 bert/encoder/layer_4/attention/self/Reshape_3__364" -> "1011 bert/encoder/layer_4/attention/self/Reshape_3" [label="[2]", style=dashed]; +"480 Constant_nncf_474" -> "481 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__175" [label="[1]", style=dashed]; +"481 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__175" -> "482 bert/encoder/layer_4/attention/self/Reshape_2/shape_Concat__179" [label="[1]", style=dashed]; +"482 bert/encoder/layer_4/attention/self/Reshape_2/shape_Concat__179" -> "483 bert/encoder/layer_4/attention/self/Reshape_2__359" [label="[4]", style=dashed]; +"483 bert/encoder/layer_4/attention/self/Reshape_2__359" -> "981 bert/encoder/layer_4/attention/self/Reshape_2" [label="[4]", style=dashed]; +"484 Constant_nncf_478" -> "485 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__180" [label="[1]", style=dashed]; +"485 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__180" -> "486 bert/encoder/layer_4/attention/self/Reshape_1/shape_Concat__184" [label="[1]", style=dashed]; +"486 bert/encoder/layer_4/attention/self/Reshape_1/shape_Concat__184" -> "487 bert/encoder/layer_4/attention/self/Reshape_1__361" [label="[4]", style=dashed]; +"487 bert/encoder/layer_4/attention/self/Reshape_1__361" -> "997 bert/encoder/layer_4/attention/self/Reshape_1" [label="[4]", style=dashed]; +"488 Constant_nncf_482" -> "489 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__185" [label="[1]", style=dashed]; +"489 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__185" -> "490 bert/encoder/layer_4/attention/self/Reshape/shape_Concat__189" [label="[1]", style=dashed]; +"490 bert/encoder/layer_4/attention/self/Reshape/shape_Concat__189" -> "491 bert/encoder/layer_4/attention/self/Reshape__360" [label="[4]", style=dashed]; +"491 bert/encoder/layer_4/attention/self/Reshape__360" -> "989 bert/encoder/layer_4/attention/self/Reshape" [label="[4]", style=dashed]; +"492 bert/encoder/layer_3/attention/self/mul_2" -> "494 bert/encoder/layer_3/attention/self/Reshape_3/shape_Unsqueeze__190" [label="[]", style=dashed]; +"493 Constant_nncf_487" -> "494 bert/encoder/layer_3/attention/self/Reshape_3/shape_Unsqueeze__190" [label="[1]", style=dashed]; +"494 bert/encoder/layer_3/attention/self/Reshape_3/shape_Unsqueeze__190" -> "495 bert/encoder/layer_3/attention/self/Reshape_3/shape_Concat__192" [label="[1]", style=dashed]; +"495 bert/encoder/layer_3/attention/self/Reshape_3/shape_Concat__192" -> "496 bert/encoder/layer_3/attention/self/Reshape_3__350" [label="[2]", style=dashed]; +"496 bert/encoder/layer_3/attention/self/Reshape_3__350" -> "918 bert/encoder/layer_3/attention/self/Reshape_3" [label="[2]", style=dashed]; +"497 Constant_nncf_491" -> "498 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__193" [label="[1]", style=dashed]; +"498 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__193" -> "499 bert/encoder/layer_3/attention/self/Reshape_2/shape_Concat__197" [label="[1]", style=dashed]; +"499 bert/encoder/layer_3/attention/self/Reshape_2/shape_Concat__197" -> "500 bert/encoder/layer_3/attention/self/Reshape_2__345" [label="[4]", style=dashed]; +"500 
bert/encoder/layer_3/attention/self/Reshape_2__345" -> "888 bert/encoder/layer_3/attention/self/Reshape_2" [label="[4]", style=dashed]; +"501 Constant_nncf_495" -> "502 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__198" [label="[1]", style=dashed]; +"502 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__198" -> "503 bert/encoder/layer_3/attention/self/Reshape_1/shape_Concat__202" [label="[1]", style=dashed]; +"503 bert/encoder/layer_3/attention/self/Reshape_1/shape_Concat__202" -> "504 bert/encoder/layer_3/attention/self/Reshape_1__347" [label="[4]", style=dashed]; +"504 bert/encoder/layer_3/attention/self/Reshape_1__347" -> "904 bert/encoder/layer_3/attention/self/Reshape_1" [label="[4]", style=dashed]; +"505 Constant_nncf_499" -> "506 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__203" [label="[1]", style=dashed]; +"506 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__203" -> "507 bert/encoder/layer_3/attention/self/Reshape/shape_Concat__207" [label="[1]", style=dashed]; +"507 bert/encoder/layer_3/attention/self/Reshape/shape_Concat__207" -> "508 bert/encoder/layer_3/attention/self/Reshape__346" [label="[4]", style=dashed]; +"508 bert/encoder/layer_3/attention/self/Reshape__346" -> "896 bert/encoder/layer_3/attention/self/Reshape" [label="[4]", style=dashed]; +"509 bert/encoder/layer_2/attention/self/mul_2" -> "511 bert/encoder/layer_2/attention/self/Reshape_3/shape_Unsqueeze__208" [label="[]", style=dashed]; +"510 Constant_nncf_504" -> "511 bert/encoder/layer_2/attention/self/Reshape_3/shape_Unsqueeze__208" [label="[1]", style=dashed]; +"511 bert/encoder/layer_2/attention/self/Reshape_3/shape_Unsqueeze__208" -> "512 bert/encoder/layer_2/attention/self/Reshape_3/shape_Concat__210" [label="[1]", style=dashed]; +"512 bert/encoder/layer_2/attention/self/Reshape_3/shape_Concat__210" -> "513 bert/encoder/layer_2/attention/self/Reshape_3__336" [label="[2]", style=dashed]; +"513 bert/encoder/layer_2/attention/self/Reshape_3__336" -> "825 bert/encoder/layer_2/attention/self/Reshape_3" [label="[2]", style=dashed]; +"514 Constant_nncf_508" -> "515 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__211" [label="[1]", style=dashed]; +"515 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__211" -> "516 bert/encoder/layer_2/attention/self/Reshape_2/shape_Concat__215" [label="[1]", style=dashed]; +"516 bert/encoder/layer_2/attention/self/Reshape_2/shape_Concat__215" -> "517 bert/encoder/layer_2/attention/self/Reshape_2__331" [label="[4]", style=dashed]; +"517 bert/encoder/layer_2/attention/self/Reshape_2__331" -> "795 bert/encoder/layer_2/attention/self/Reshape_2" [label="[4]", style=dashed]; +"518 Constant_nncf_512" -> "519 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__216" [label="[1]", style=dashed]; +"519 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__216" -> "520 bert/encoder/layer_2/attention/self/Reshape_1/shape_Concat__220" [label="[1]", style=dashed]; +"520 bert/encoder/layer_2/attention/self/Reshape_1/shape_Concat__220" -> "521 bert/encoder/layer_2/attention/self/Reshape_1__333" [label="[4]", style=dashed]; +"521 bert/encoder/layer_2/attention/self/Reshape_1__333" -> "811 bert/encoder/layer_2/attention/self/Reshape_1" [label="[4]", style=dashed]; +"522 Constant_nncf_516" -> "523 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__221" [label="[1]", style=dashed]; +"523 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__221" -> "524 
bert/encoder/layer_2/attention/self/Reshape/shape_Concat__225" [label="[1]", style=dashed]; +"524 bert/encoder/layer_2/attention/self/Reshape/shape_Concat__225" -> "525 bert/encoder/layer_2/attention/self/Reshape__332" [label="[4]", style=dashed]; +"525 bert/encoder/layer_2/attention/self/Reshape__332" -> "803 bert/encoder/layer_2/attention/self/Reshape" [label="[4]", style=dashed]; +"526 bert/encoder/layer_11/attention/self/mul_2" -> "528 bert/encoder/layer_11/attention/self/Reshape_3/shape_Unsqueeze__226" [label="[]", style=dashed]; +"527 Constant_nncf_521" -> "528 bert/encoder/layer_11/attention/self/Reshape_3/shape_Unsqueeze__226" [label="[1]", style=dashed]; +"528 bert/encoder/layer_11/attention/self/Reshape_3/shape_Unsqueeze__226" -> "529 bert/encoder/layer_11/attention/self/Reshape_3/shape_Concat__228" [label="[1]", style=dashed]; +"529 bert/encoder/layer_11/attention/self/Reshape_3/shape_Concat__228" -> "530 bert/encoder/layer_11/attention/self/Reshape_3__462" [label="[2]", style=dashed]; +"530 bert/encoder/layer_11/attention/self/Reshape_3__462" -> "1662 bert/encoder/layer_11/attention/self/Reshape_3" [label="[2]", style=dashed]; +"531 Constant_nncf_525" -> "532 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__229" [label="[1]", style=dashed]; +"532 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__229" -> "533 bert/encoder/layer_11/attention/self/Reshape_2/shape_Concat__233" [label="[1]", style=dashed]; +"533 bert/encoder/layer_11/attention/self/Reshape_2/shape_Concat__233" -> "534 bert/encoder/layer_11/attention/self/Reshape_2__457" [label="[4]", style=dashed]; +"534 bert/encoder/layer_11/attention/self/Reshape_2__457" -> "1632 bert/encoder/layer_11/attention/self/Reshape_2" [label="[4]", style=dashed]; +"535 Constant_nncf_529" -> "536 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__234" [label="[1]", style=dashed]; +"536 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__234" -> "537 bert/encoder/layer_11/attention/self/Reshape_1/shape_Concat__238" [label="[1]", style=dashed]; +"537 bert/encoder/layer_11/attention/self/Reshape_1/shape_Concat__238" -> "538 bert/encoder/layer_11/attention/self/Reshape_1__459" [label="[4]", style=dashed]; +"538 bert/encoder/layer_11/attention/self/Reshape_1__459" -> "1648 bert/encoder/layer_11/attention/self/Reshape_1" [label="[4]", style=dashed]; +"539 Constant_nncf_533" -> "540 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__239" [label="[1]", style=dashed]; +"540 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__239" -> "541 bert/encoder/layer_11/attention/self/Reshape/shape_Concat__243" [label="[1]", style=dashed]; +"541 bert/encoder/layer_11/attention/self/Reshape/shape_Concat__243" -> "542 bert/encoder/layer_11/attention/self/Reshape__458" [label="[4]", style=dashed]; +"542 bert/encoder/layer_11/attention/self/Reshape__458" -> "1640 bert/encoder/layer_11/attention/self/Reshape" [label="[4]", style=dashed]; +"543 bert/encoder/layer_10/attention/self/mul_2" -> "545 bert/encoder/layer_10/attention/self/Reshape_3/shape_Unsqueeze__244" [label="[]", style=dashed]; +"544 Constant_nncf_538" -> "545 bert/encoder/layer_10/attention/self/Reshape_3/shape_Unsqueeze__244" [label="[1]", style=dashed]; +"545 bert/encoder/layer_10/attention/self/Reshape_3/shape_Unsqueeze__244" -> "546 bert/encoder/layer_10/attention/self/Reshape_3/shape_Concat__246" [label="[1]", style=dashed]; +"546 bert/encoder/layer_10/attention/self/Reshape_3/shape_Concat__246" -> "547 
bert/encoder/layer_10/attention/self/Reshape_3__448" [label="[2]", style=dashed]; +"547 bert/encoder/layer_10/attention/self/Reshape_3__448" -> "1569 bert/encoder/layer_10/attention/self/Reshape_3" [label="[2]", style=dashed]; +"548 Constant_nncf_542" -> "549 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__247" [label="[1]", style=dashed]; +"549 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__247" -> "550 bert/encoder/layer_10/attention/self/Reshape_2/shape_Concat__251" [label="[1]", style=dashed]; +"550 bert/encoder/layer_10/attention/self/Reshape_2/shape_Concat__251" -> "551 bert/encoder/layer_10/attention/self/Reshape_2__443" [label="[4]", style=dashed]; +"551 bert/encoder/layer_10/attention/self/Reshape_2__443" -> "1539 bert/encoder/layer_10/attention/self/Reshape_2" [label="[4]", style=dashed]; +"552 Constant_nncf_546" -> "553 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__252" [label="[1]", style=dashed]; +"553 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__252" -> "554 bert/encoder/layer_10/attention/self/Reshape_1/shape_Concat__256" [label="[1]", style=dashed]; +"554 bert/encoder/layer_10/attention/self/Reshape_1/shape_Concat__256" -> "555 bert/encoder/layer_10/attention/self/Reshape_1__445" [label="[4]", style=dashed]; +"555 bert/encoder/layer_10/attention/self/Reshape_1__445" -> "1555 bert/encoder/layer_10/attention/self/Reshape_1" [label="[4]", style=dashed]; +"556 Constant_nncf_550" -> "557 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__257" [label="[1]", style=dashed]; +"557 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__257" -> "558 bert/encoder/layer_10/attention/self/Reshape/shape_Concat__261" [label="[1]", style=dashed]; +"558 bert/encoder/layer_10/attention/self/Reshape/shape_Concat__261" -> "559 bert/encoder/layer_10/attention/self/Reshape__444" [label="[4]", style=dashed]; +"559 bert/encoder/layer_10/attention/self/Reshape__444" -> "1547 bert/encoder/layer_10/attention/self/Reshape" [label="[4]", style=dashed]; +"560 bert/encoder/layer_1/attention/self/mul_2" -> "562 bert/encoder/layer_1/attention/self/Reshape_3/shape_Unsqueeze__262" [label="[]", style=dashed]; +"561 Constant_nncf_555" -> "562 bert/encoder/layer_1/attention/self/Reshape_3/shape_Unsqueeze__262" [label="[1]", style=dashed]; +"562 bert/encoder/layer_1/attention/self/Reshape_3/shape_Unsqueeze__262" -> "563 bert/encoder/layer_1/attention/self/Reshape_3/shape_Concat__264" [label="[1]", style=dashed]; +"563 bert/encoder/layer_1/attention/self/Reshape_3/shape_Concat__264" -> "564 bert/encoder/layer_1/attention/self/Reshape_3__322" [label="[2]", style=dashed]; +"564 bert/encoder/layer_1/attention/self/Reshape_3__322" -> "732 bert/encoder/layer_1/attention/self/Reshape_3" [label="[2]", style=dashed]; +"565 Constant_nncf_559" -> "566 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__265" [label="[1]", style=dashed]; +"566 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__265" -> "567 bert/encoder/layer_1/attention/self/Reshape_2/shape_Concat__269" [label="[1]", style=dashed]; +"567 bert/encoder/layer_1/attention/self/Reshape_2/shape_Concat__269" -> "568 bert/encoder/layer_1/attention/self/Reshape_2__317" [label="[4]", style=dashed]; +"568 bert/encoder/layer_1/attention/self/Reshape_2__317" -> "702 bert/encoder/layer_1/attention/self/Reshape_2" [label="[4]", style=dashed]; +"569 Constant_nncf_563" -> "570 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__270" [label="[1]", 
style=dashed]; +"570 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__270" -> "571 bert/encoder/layer_1/attention/self/Reshape_1/shape_Concat__274" [label="[1]", style=dashed]; +"571 bert/encoder/layer_1/attention/self/Reshape_1/shape_Concat__274" -> "572 bert/encoder/layer_1/attention/self/Reshape_1__319" [label="[4]", style=dashed]; +"572 bert/encoder/layer_1/attention/self/Reshape_1__319" -> "718 bert/encoder/layer_1/attention/self/Reshape_1" [label="[4]", style=dashed]; +"573 Constant_nncf_567" -> "574 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__275" [label="[1]", style=dashed]; +"574 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__275" -> "575 bert/encoder/layer_1/attention/self/Reshape/shape_Concat__279" [label="[1]", style=dashed]; +"575 bert/encoder/layer_1/attention/self/Reshape/shape_Concat__279" -> "576 bert/encoder/layer_1/attention/self/Reshape__318" [label="[4]", style=dashed]; +"576 bert/encoder/layer_1/attention/self/Reshape__318" -> "710 bert/encoder/layer_1/attention/self/Reshape" [label="[4]", style=dashed]; +"577 bert/encoder/layer_0/attention/self/mul_2" -> "579 bert/encoder/layer_0/attention/self/Reshape_3/shape_Unsqueeze__280" [label="[]", style=dashed]; +"578 Constant_nncf_572" -> "579 bert/encoder/layer_0/attention/self/Reshape_3/shape_Unsqueeze__280" [label="[1]", style=dashed]; +"579 bert/encoder/layer_0/attention/self/Reshape_3/shape_Unsqueeze__280" -> "580 bert/encoder/layer_0/attention/self/Reshape_3/shape_Concat__282" [label="[1]", style=dashed]; +"580 bert/encoder/layer_0/attention/self/Reshape_3/shape_Concat__282" -> "581 bert/encoder/layer_0/attention/self/Reshape_3__308" [label="[2]", style=dashed]; +"581 bert/encoder/layer_0/attention/self/Reshape_3__308" -> "639 bert/encoder/layer_0/attention/self/Reshape_3" [label="[2]", style=dashed]; +"582 Constant_nncf_576" -> "583 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__283" [label="[1]", style=dashed]; +"583 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__283" -> "584 bert/encoder/layer_0/attention/self/Reshape_2/shape_Concat__287" [label="[1]", style=dashed]; +"584 bert/encoder/layer_0/attention/self/Reshape_2/shape_Concat__287" -> "585 bert/encoder/layer_0/attention/self/Reshape_2__303" [label="[4]", style=dashed]; +"585 bert/encoder/layer_0/attention/self/Reshape_2__303" -> "609 bert/encoder/layer_0/attention/self/Reshape_2" [label="[4]", style=dashed]; +"586 Constant_nncf_580" -> "587 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__288" [label="[1]", style=dashed]; +"587 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__288" -> "588 bert/encoder/layer_0/attention/self/Reshape_1/shape_Concat__292" [label="[1]", style=dashed]; +"588 bert/encoder/layer_0/attention/self/Reshape_1/shape_Concat__292" -> "589 bert/encoder/layer_0/attention/self/Reshape_1__305" [label="[4]", style=dashed]; +"589 bert/encoder/layer_0/attention/self/Reshape_1__305" -> "625 bert/encoder/layer_0/attention/self/Reshape_1" [label="[4]", style=dashed]; +"590 Constant_nncf_584" -> "591 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__293" [label="[1]", style=dashed]; +"591 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__293" -> "592 bert/encoder/layer_0/attention/self/Reshape/shape_Concat__297" [label="[1]", style=dashed]; +"592 bert/encoder/layer_0/attention/self/Reshape/shape_Concat__297" -> "593 bert/encoder/layer_0/attention/self/Reshape__304" [label="[4]", style=dashed]; +"593 
bert/encoder/layer_0/attention/self/Reshape__304" -> "617 bert/encoder/layer_0/attention/self/Reshape" [label="[4]", style=dashed]; +"594 Constant_nncf_588" -> "595 bert/encoder/Reshape_13/shape_Unsqueeze__298" [label="[1]", style=dashed]; +"595 bert/encoder/Reshape_13/shape_Unsqueeze__298" -> "596 bert/encoder/Reshape_13/shape_Concat__301" [label="[1]", style=dashed]; +"596 bert/encoder/Reshape_13/shape_Concat__301" -> "597 bert/encoder/Reshape_13__471" [label="[3]", style=dashed]; +"597 bert/encoder/Reshape_13__471" -> "1717 bert/encoder/Reshape_13" [label="[3]", style=dashed]; +"598 bert/encoder/Reshape_1" -> "599 QuantizeLinear_bert/encoder/Reshape_1^0_1" [label="[]", style=solid]; +"598 bert/encoder/Reshape_1" -> "603 QuantizeLinear_bert/encoder/Reshape_1^0_2" [label="[]", style=solid]; +"598 bert/encoder/Reshape_1" -> "605 QuantizeLinear_bert/encoder/Reshape_1^0_3" [label="[]", style=solid]; +"598 bert/encoder/Reshape_1" -> "644 bert/encoder/layer_0/attention/output/add" [label="[]", style=solid]; +"599 QuantizeLinear_bert/encoder/Reshape_1^0_1" -> "600 DequantizeLinear_bert/encoder/Reshape_1^0_1" [label="[]", style=dashed]; +"600 DequantizeLinear_bert/encoder/Reshape_1^0_1" -> "607 bert/encoder/layer_0/attention/self/value/MatMul" [label="[]", style=solid]; +"601 QuantizeLinear_bert/encoder/layer_0/attention/self/value/kernel^0_1" -> "602 DequantizeLinear_bert/encoder/layer_0/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"602 DequantizeLinear_bert/encoder/layer_0/attention/self/value/kernel^0_1" -> "607 bert/encoder/layer_0/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"603 QuantizeLinear_bert/encoder/Reshape_1^0_2" -> "604 DequantizeLinear_bert/encoder/Reshape_1^0_2" [label="[]", style=dashed]; +"604 DequantizeLinear_bert/encoder/Reshape_1^0_2" -> "613 bert/encoder/layer_0/attention/self/query/MatMul" [label="[]", style=solid]; +"605 QuantizeLinear_bert/encoder/Reshape_1^0_3" -> "606 DequantizeLinear_bert/encoder/Reshape_1^0_3" [label="[]", style=dashed]; +"606 DequantizeLinear_bert/encoder/Reshape_1^0_3" -> "621 bert/encoder/layer_0/attention/self/key/MatMul" [label="[]", style=solid]; +"607 bert/encoder/layer_0/attention/self/value/MatMul" -> "608 bert/encoder/layer_0/attention/self/value/BiasAdd" [label="[]", style=solid]; +"608 bert/encoder/layer_0/attention/self/value/BiasAdd" -> "609 bert/encoder/layer_0/attention/self/Reshape_2" [label="[]", style=solid]; +"609 bert/encoder/layer_0/attention/self/Reshape_2" -> "610 bert/encoder/layer_0/attention/self/transpose_2" [label="[]", style=solid]; +"610 bert/encoder/layer_0/attention/self/transpose_2" -> "635 bert/encoder/layer_0/attention/self/MatMul_1" [label="[]", style=solid]; +"611 QuantizeLinear_bert/encoder/layer_0/attention/self/query/kernel^0_1" -> "612 DequantizeLinear_bert/encoder/layer_0/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"612 DequantizeLinear_bert/encoder/layer_0/attention/self/query/kernel^0_1" -> "613 bert/encoder/layer_0/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"613 bert/encoder/layer_0/attention/self/query/MatMul" -> "614 bert/encoder/layer_0/attention/self/query/BiasAdd" [label="[]", style=solid]; +"614 bert/encoder/layer_0/attention/self/query/BiasAdd" -> "615 QuantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"615 QuantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd^0_1" -> "616 DequantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd^0_1" 
[label="[]", style=dashed]; +"616 DequantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd^0_1" -> "617 bert/encoder/layer_0/attention/self/Reshape" [label="[]", style=solid]; +"617 bert/encoder/layer_0/attention/self/Reshape" -> "618 bert/encoder/layer_0/attention/self/transpose" [label="[]", style=solid]; +"618 bert/encoder/layer_0/attention/self/transpose" -> "628 bert/encoder/layer_0/attention/self/MatMul" [label="[]", style=solid]; +"619 QuantizeLinear_bert/encoder/layer_0/attention/self/key/kernel^0_1" -> "620 DequantizeLinear_bert/encoder/layer_0/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"620 DequantizeLinear_bert/encoder/layer_0/attention/self/key/kernel^0_1" -> "621 bert/encoder/layer_0/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"621 bert/encoder/layer_0/attention/self/key/MatMul" -> "622 bert/encoder/layer_0/attention/self/key/BiasAdd" [label="[]", style=solid]; +"622 bert/encoder/layer_0/attention/self/key/BiasAdd" -> "623 QuantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"623 QuantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd^0_1" -> "624 DequantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"624 DequantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd^0_1" -> "625 bert/encoder/layer_0/attention/self/Reshape_1" [label="[]", style=solid]; +"625 bert/encoder/layer_0/attention/self/Reshape_1" -> "626 bert/encoder/layer_0/attention/self/transpose_1" [label="[]", style=solid]; +"626 bert/encoder/layer_0/attention/self/transpose_1" -> "627 bert/encoder/layer_0/attention/self/MatMul__306" [label="[]", style=solid]; +"627 bert/encoder/layer_0/attention/self/MatMul__306" -> "628 bert/encoder/layer_0/attention/self/MatMul" [label="[]", style=solid]; +"628 bert/encoder/layer_0/attention/self/MatMul" -> "629 bert/encoder/layer_0/attention/self/Mul" [label="[]", style=solid]; +"629 bert/encoder/layer_0/attention/self/Mul" -> "630 bert/encoder/layer_0/attention/self/add" [label="[]", style=solid]; +"630 bert/encoder/layer_0/attention/self/add" -> "631 Shape_nncf_609" [label="[]", style=solid]; +"630 bert/encoder/layer_0/attention/self/add" -> "632 Flatten_nncf_610" [label="[]", style=solid]; +"631 Shape_nncf_609" -> "634 Reshape_nncf_612" [label="[-1]", style=dashed]; +"632 Flatten_nncf_610" -> "633 bert/encoder/layer_0/attention/self/Softmax" [label="[]", style=solid]; +"633 bert/encoder/layer_0/attention/self/Softmax" -> "634 Reshape_nncf_612" [label="[]", style=solid]; +"634 Reshape_nncf_612" -> "635 bert/encoder/layer_0/attention/self/MatMul_1" [label="[]", style=solid]; +"635 bert/encoder/layer_0/attention/self/MatMul_1" -> "636 QuantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"636 QuantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1^0_1" -> "637 DequantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"637 DequantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1^0_1" -> "638 bert/encoder/layer_0/attention/self/transpose_3" [label="[]", style=solid]; +"638 bert/encoder/layer_0/attention/self/transpose_3" -> "639 bert/encoder/layer_0/attention/self/Reshape_3" [label="[]", style=solid]; +"639 bert/encoder/layer_0/attention/self/Reshape_3" -> "642 bert/encoder/layer_0/attention/output/dense/MatMul" [label="[]", style=solid]; +"640 QuantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel^0_1" -> "641 
DequantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"641 DequantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel^0_1" -> "642 bert/encoder/layer_0/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"642 bert/encoder/layer_0/attention/output/dense/MatMul" -> "643 bert/encoder/layer_0/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"643 bert/encoder/layer_0/attention/output/dense/BiasAdd" -> "644 bert/encoder/layer_0/attention/output/add" [label="[]", style=solid]; +"644 bert/encoder/layer_0/attention/output/add" -> "645 bert/encoder/layer_0/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"644 bert/encoder/layer_0/attention/output/add" -> "647 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"644 bert/encoder/layer_0/attention/output/add" -> "656 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"645 bert/encoder/layer_0/attention/output/LayerNorm/moments/mean" -> "646 bert/encoder/layer_0/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"645 bert/encoder/layer_0/attention/output/LayerNorm/moments/mean" -> "654 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"646 bert/encoder/layer_0/attention/output/LayerNorm/moments/StopGradient" -> "647 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"647 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference" -> "648 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference__309" [label="[]", style=solid]; +"648 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference__309" -> "649 bert/encoder/layer_0/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"649 bert/encoder/layer_0/attention/output/LayerNorm/moments/variance" -> "650 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"650 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add" -> "651 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"651 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/Rsqrt" -> "652 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/Rsqrt__311" [label="[]", style=solid]; +"652 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/Rsqrt__311" -> "653 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"653 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul" -> "654 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"653 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul" -> "656 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"654 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_2" -> "655 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"655 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/sub" -> "657 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"656 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_1" -> "657 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"657 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1" -> "658 
QuantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"657 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1" -> "678 bert/encoder/layer_0/output/add" [label="[]", style=solid]; +"658 QuantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "659 DequantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"659 DequantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "662 bert/encoder/layer_0/intermediate/dense/MatMul" [label="[]", style=solid]; +"660 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel^0_1" -> "661 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"661 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel^0_1" -> "662 bert/encoder/layer_0/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"662 bert/encoder/layer_0/intermediate/dense/MatMul" -> "663 bert/encoder/layer_0/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"663 bert/encoder/layer_0/intermediate/dense/BiasAdd" -> "664 bert/encoder/layer_0/intermediate/dense/Pow" [label="[]", style=solid]; +"663 bert/encoder/layer_0/intermediate/dense/BiasAdd" -> "666 bert/encoder/layer_0/intermediate/dense/add" [label="[]", style=solid]; +"663 bert/encoder/layer_0/intermediate/dense/BiasAdd" -> "671 bert/encoder/layer_0/intermediate/dense/mul_3" [label="[]", style=solid]; +"664 bert/encoder/layer_0/intermediate/dense/Pow" -> "665 bert/encoder/layer_0/intermediate/dense/mul" [label="[]", style=solid]; +"665 bert/encoder/layer_0/intermediate/dense/mul" -> "666 bert/encoder/layer_0/intermediate/dense/add" [label="[]", style=solid]; +"666 bert/encoder/layer_0/intermediate/dense/add" -> "667 bert/encoder/layer_0/intermediate/dense/mul_1" [label="[]", style=solid]; +"667 bert/encoder/layer_0/intermediate/dense/mul_1" -> "668 bert/encoder/layer_0/intermediate/dense/Tanh" [label="[]", style=solid]; +"668 bert/encoder/layer_0/intermediate/dense/Tanh" -> "669 bert/encoder/layer_0/intermediate/dense/add_1" [label="[]", style=solid]; +"669 bert/encoder/layer_0/intermediate/dense/add_1" -> "670 bert/encoder/layer_0/intermediate/dense/mul_2" [label="[]", style=solid]; +"670 bert/encoder/layer_0/intermediate/dense/mul_2" -> "671 bert/encoder/layer_0/intermediate/dense/mul_3" [label="[]", style=solid]; +"671 bert/encoder/layer_0/intermediate/dense/mul_3" -> "672 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"672 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3^0_1" -> "673 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"673 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3^0_1" -> "676 bert/encoder/layer_0/output/dense/MatMul" [label="[]", style=solid]; +"674 QuantizeLinear_bert/encoder/layer_0/output/dense/kernel^0_1" -> "675 DequantizeLinear_bert/encoder/layer_0/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"675 DequantizeLinear_bert/encoder/layer_0/output/dense/kernel^0_1" -> "676 bert/encoder/layer_0/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"676 bert/encoder/layer_0/output/dense/MatMul" -> "677 bert/encoder/layer_0/output/dense/BiasAdd" [label="[]", style=solid]; +"677 bert/encoder/layer_0/output/dense/BiasAdd" -> "678 bert/encoder/layer_0/output/add" [label="[]", style=solid]; +"678 
bert/encoder/layer_0/output/add" -> "679 bert/encoder/layer_0/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"678 bert/encoder/layer_0/output/add" -> "681 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"678 bert/encoder/layer_0/output/add" -> "690 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"679 bert/encoder/layer_0/output/LayerNorm/moments/mean" -> "680 bert/encoder/layer_0/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"679 bert/encoder/layer_0/output/LayerNorm/moments/mean" -> "688 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"680 bert/encoder/layer_0/output/LayerNorm/moments/StopGradient" -> "681 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"681 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference" -> "682 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference__313" [label="[]", style=solid]; +"682 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference__313" -> "683 bert/encoder/layer_0/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"683 bert/encoder/layer_0/output/LayerNorm/moments/variance" -> "684 bert/encoder/layer_0/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"684 bert/encoder/layer_0/output/LayerNorm/batchnorm/add" -> "685 bert/encoder/layer_0/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"685 bert/encoder/layer_0/output/LayerNorm/batchnorm/Rsqrt" -> "686 bert/encoder/layer_0/output/LayerNorm/batchnorm/Rsqrt__315" [label="[]", style=solid]; +"686 bert/encoder/layer_0/output/LayerNorm/batchnorm/Rsqrt__315" -> "687 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"687 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul" -> "688 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"687 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul" -> "690 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"688 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_2" -> "689 bert/encoder/layer_0/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"689 bert/encoder/layer_0/output/LayerNorm/batchnorm/sub" -> "691 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"690 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_1" -> "691 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"691 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" -> "692 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"691 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" -> "696 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"691 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" -> "698 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"691 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" -> "737 bert/encoder/layer_1/attention/output/add" [label="[]", style=solid]; +"692 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_1" -> "693 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"693 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_1" -> "700 bert/encoder/layer_1/attention/self/value/MatMul" [label="[]", 
style=solid]; +"694 QuantizeLinear_bert/encoder/layer_1/attention/self/value/kernel^0_1" -> "695 DequantizeLinear_bert/encoder/layer_1/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"695 DequantizeLinear_bert/encoder/layer_1/attention/self/value/kernel^0_1" -> "700 bert/encoder/layer_1/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"696 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_2" -> "697 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"697 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_2" -> "706 bert/encoder/layer_1/attention/self/query/MatMul" [label="[]", style=solid]; +"698 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_3" -> "699 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"699 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_3" -> "714 bert/encoder/layer_1/attention/self/key/MatMul" [label="[]", style=solid]; +"700 bert/encoder/layer_1/attention/self/value/MatMul" -> "701 bert/encoder/layer_1/attention/self/value/BiasAdd" [label="[]", style=solid]; +"701 bert/encoder/layer_1/attention/self/value/BiasAdd" -> "702 bert/encoder/layer_1/attention/self/Reshape_2" [label="[]", style=solid]; +"702 bert/encoder/layer_1/attention/self/Reshape_2" -> "703 bert/encoder/layer_1/attention/self/transpose_2" [label="[]", style=solid]; +"703 bert/encoder/layer_1/attention/self/transpose_2" -> "728 bert/encoder/layer_1/attention/self/MatMul_1" [label="[]", style=solid]; +"704 QuantizeLinear_bert/encoder/layer_1/attention/self/query/kernel^0_1" -> "705 DequantizeLinear_bert/encoder/layer_1/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"705 DequantizeLinear_bert/encoder/layer_1/attention/self/query/kernel^0_1" -> "706 bert/encoder/layer_1/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"706 bert/encoder/layer_1/attention/self/query/MatMul" -> "707 bert/encoder/layer_1/attention/self/query/BiasAdd" [label="[]", style=solid]; +"707 bert/encoder/layer_1/attention/self/query/BiasAdd" -> "708 QuantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"708 QuantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd^0_1" -> "709 DequantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"709 DequantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd^0_1" -> "710 bert/encoder/layer_1/attention/self/Reshape" [label="[]", style=solid]; +"710 bert/encoder/layer_1/attention/self/Reshape" -> "711 bert/encoder/layer_1/attention/self/transpose" [label="[]", style=solid]; +"711 bert/encoder/layer_1/attention/self/transpose" -> "721 bert/encoder/layer_1/attention/self/MatMul" [label="[]", style=solid]; +"712 QuantizeLinear_bert/encoder/layer_1/attention/self/key/kernel^0_1" -> "713 DequantizeLinear_bert/encoder/layer_1/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"713 DequantizeLinear_bert/encoder/layer_1/attention/self/key/kernel^0_1" -> "714 bert/encoder/layer_1/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"714 bert/encoder/layer_1/attention/self/key/MatMul" -> "715 bert/encoder/layer_1/attention/self/key/BiasAdd" [label="[]", style=solid]; +"715 bert/encoder/layer_1/attention/self/key/BiasAdd" -> "716 QuantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd^0_1" [label="[]", 
style=solid]; +"716 QuantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd^0_1" -> "717 DequantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"717 DequantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd^0_1" -> "718 bert/encoder/layer_1/attention/self/Reshape_1" [label="[]", style=solid]; +"718 bert/encoder/layer_1/attention/self/Reshape_1" -> "719 bert/encoder/layer_1/attention/self/transpose_1" [label="[]", style=solid]; +"719 bert/encoder/layer_1/attention/self/transpose_1" -> "720 bert/encoder/layer_1/attention/self/MatMul__320" [label="[]", style=solid]; +"720 bert/encoder/layer_1/attention/self/MatMul__320" -> "721 bert/encoder/layer_1/attention/self/MatMul" [label="[]", style=solid]; +"721 bert/encoder/layer_1/attention/self/MatMul" -> "722 bert/encoder/layer_1/attention/self/Mul" [label="[]", style=solid]; +"722 bert/encoder/layer_1/attention/self/Mul" -> "723 bert/encoder/layer_1/attention/self/add" [label="[]", style=solid]; +"723 bert/encoder/layer_1/attention/self/add" -> "724 Shape_nncf_674" [label="[]", style=solid]; +"723 bert/encoder/layer_1/attention/self/add" -> "725 Flatten_nncf_675" [label="[]", style=solid]; +"724 Shape_nncf_674" -> "727 Reshape_nncf_677" [label="[-1]", style=dashed]; +"725 Flatten_nncf_675" -> "726 bert/encoder/layer_1/attention/self/Softmax" [label="[]", style=solid]; +"726 bert/encoder/layer_1/attention/self/Softmax" -> "727 Reshape_nncf_677" [label="[]", style=solid]; +"727 Reshape_nncf_677" -> "728 bert/encoder/layer_1/attention/self/MatMul_1" [label="[]", style=solid]; +"728 bert/encoder/layer_1/attention/self/MatMul_1" -> "729 QuantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"729 QuantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1^0_1" -> "730 DequantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"730 DequantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1^0_1" -> "731 bert/encoder/layer_1/attention/self/transpose_3" [label="[]", style=solid]; +"731 bert/encoder/layer_1/attention/self/transpose_3" -> "732 bert/encoder/layer_1/attention/self/Reshape_3" [label="[]", style=solid]; +"732 bert/encoder/layer_1/attention/self/Reshape_3" -> "735 bert/encoder/layer_1/attention/output/dense/MatMul" [label="[]", style=solid]; +"733 QuantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel^0_1" -> "734 DequantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"734 DequantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel^0_1" -> "735 bert/encoder/layer_1/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"735 bert/encoder/layer_1/attention/output/dense/MatMul" -> "736 bert/encoder/layer_1/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"736 bert/encoder/layer_1/attention/output/dense/BiasAdd" -> "737 bert/encoder/layer_1/attention/output/add" [label="[]", style=solid]; +"737 bert/encoder/layer_1/attention/output/add" -> "738 bert/encoder/layer_1/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"737 bert/encoder/layer_1/attention/output/add" -> "740 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"737 bert/encoder/layer_1/attention/output/add" -> "749 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"738 bert/encoder/layer_1/attention/output/LayerNorm/moments/mean" -> "739 
bert/encoder/layer_1/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"738 bert/encoder/layer_1/attention/output/LayerNorm/moments/mean" -> "747 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"739 bert/encoder/layer_1/attention/output/LayerNorm/moments/StopGradient" -> "740 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"740 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference" -> "741 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference__323" [label="[]", style=solid]; +"741 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference__323" -> "742 bert/encoder/layer_1/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"742 bert/encoder/layer_1/attention/output/LayerNorm/moments/variance" -> "743 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"743 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add" -> "744 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"744 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/Rsqrt" -> "745 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/Rsqrt__325" [label="[]", style=solid]; +"745 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/Rsqrt__325" -> "746 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"746 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul" -> "747 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"746 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul" -> "749 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"747 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_2" -> "748 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"748 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/sub" -> "750 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"749 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_1" -> "750 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"750 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1" -> "751 QuantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"750 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1" -> "771 bert/encoder/layer_1/output/add" [label="[]", style=solid]; +"751 QuantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "752 DequantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"752 DequantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "755 bert/encoder/layer_1/intermediate/dense/MatMul" [label="[]", style=solid]; +"753 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel^0_1" -> "754 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"754 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel^0_1" -> "755 bert/encoder/layer_1/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"755 bert/encoder/layer_1/intermediate/dense/MatMul" -> "756 bert/encoder/layer_1/intermediate/dense/BiasAdd" 
[label="[]", style=solid]; +"756 bert/encoder/layer_1/intermediate/dense/BiasAdd" -> "757 bert/encoder/layer_1/intermediate/dense/Pow" [label="[]", style=solid]; +"756 bert/encoder/layer_1/intermediate/dense/BiasAdd" -> "759 bert/encoder/layer_1/intermediate/dense/add" [label="[]", style=solid]; +"756 bert/encoder/layer_1/intermediate/dense/BiasAdd" -> "764 bert/encoder/layer_1/intermediate/dense/mul_3" [label="[]", style=solid]; +"757 bert/encoder/layer_1/intermediate/dense/Pow" -> "758 bert/encoder/layer_1/intermediate/dense/mul" [label="[]", style=solid]; +"758 bert/encoder/layer_1/intermediate/dense/mul" -> "759 bert/encoder/layer_1/intermediate/dense/add" [label="[]", style=solid]; +"759 bert/encoder/layer_1/intermediate/dense/add" -> "760 bert/encoder/layer_1/intermediate/dense/mul_1" [label="[]", style=solid]; +"760 bert/encoder/layer_1/intermediate/dense/mul_1" -> "761 bert/encoder/layer_1/intermediate/dense/Tanh" [label="[]", style=solid]; +"761 bert/encoder/layer_1/intermediate/dense/Tanh" -> "762 bert/encoder/layer_1/intermediate/dense/add_1" [label="[]", style=solid]; +"762 bert/encoder/layer_1/intermediate/dense/add_1" -> "763 bert/encoder/layer_1/intermediate/dense/mul_2" [label="[]", style=solid]; +"763 bert/encoder/layer_1/intermediate/dense/mul_2" -> "764 bert/encoder/layer_1/intermediate/dense/mul_3" [label="[]", style=solid]; +"764 bert/encoder/layer_1/intermediate/dense/mul_3" -> "765 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"765 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3^0_1" -> "766 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"766 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3^0_1" -> "769 bert/encoder/layer_1/output/dense/MatMul" [label="[]", style=solid]; +"767 QuantizeLinear_bert/encoder/layer_1/output/dense/kernel^0_1" -> "768 DequantizeLinear_bert/encoder/layer_1/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"768 DequantizeLinear_bert/encoder/layer_1/output/dense/kernel^0_1" -> "769 bert/encoder/layer_1/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"769 bert/encoder/layer_1/output/dense/MatMul" -> "770 bert/encoder/layer_1/output/dense/BiasAdd" [label="[]", style=solid]; +"770 bert/encoder/layer_1/output/dense/BiasAdd" -> "771 bert/encoder/layer_1/output/add" [label="[]", style=solid]; +"771 bert/encoder/layer_1/output/add" -> "772 bert/encoder/layer_1/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"771 bert/encoder/layer_1/output/add" -> "774 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"771 bert/encoder/layer_1/output/add" -> "783 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"772 bert/encoder/layer_1/output/LayerNorm/moments/mean" -> "773 bert/encoder/layer_1/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"772 bert/encoder/layer_1/output/LayerNorm/moments/mean" -> "781 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"773 bert/encoder/layer_1/output/LayerNorm/moments/StopGradient" -> "774 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"774 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference" -> "775 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference__327" [label="[]", style=solid]; +"775 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference__327" -> "776 
bert/encoder/layer_1/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"776 bert/encoder/layer_1/output/LayerNorm/moments/variance" -> "777 bert/encoder/layer_1/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"777 bert/encoder/layer_1/output/LayerNorm/batchnorm/add" -> "778 bert/encoder/layer_1/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"778 bert/encoder/layer_1/output/LayerNorm/batchnorm/Rsqrt" -> "779 bert/encoder/layer_1/output/LayerNorm/batchnorm/Rsqrt__329" [label="[]", style=solid]; +"779 bert/encoder/layer_1/output/LayerNorm/batchnorm/Rsqrt__329" -> "780 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"780 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul" -> "781 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"780 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul" -> "783 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"781 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_2" -> "782 bert/encoder/layer_1/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"782 bert/encoder/layer_1/output/LayerNorm/batchnorm/sub" -> "784 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"783 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_1" -> "784 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"784 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" -> "785 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"784 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" -> "789 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"784 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" -> "791 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"784 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" -> "830 bert/encoder/layer_2/attention/output/add" [label="[]", style=solid]; +"785 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_1" -> "786 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"786 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_1" -> "793 bert/encoder/layer_2/attention/self/value/MatMul" [label="[]", style=solid]; +"787 QuantizeLinear_bert/encoder/layer_2/attention/self/value/kernel^0_1" -> "788 DequantizeLinear_bert/encoder/layer_2/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"788 DequantizeLinear_bert/encoder/layer_2/attention/self/value/kernel^0_1" -> "793 bert/encoder/layer_2/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"789 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_2" -> "790 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"790 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_2" -> "799 bert/encoder/layer_2/attention/self/query/MatMul" [label="[]", style=solid]; +"791 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_3" -> "792 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"792 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_3" -> "807 bert/encoder/layer_2/attention/self/key/MatMul" [label="[]", style=solid]; +"793 
bert/encoder/layer_2/attention/self/value/MatMul" -> "794 bert/encoder/layer_2/attention/self/value/BiasAdd" [label="[]", style=solid]; +"794 bert/encoder/layer_2/attention/self/value/BiasAdd" -> "795 bert/encoder/layer_2/attention/self/Reshape_2" [label="[]", style=solid]; +"795 bert/encoder/layer_2/attention/self/Reshape_2" -> "796 bert/encoder/layer_2/attention/self/transpose_2" [label="[]", style=solid]; +"796 bert/encoder/layer_2/attention/self/transpose_2" -> "821 bert/encoder/layer_2/attention/self/MatMul_1" [label="[]", style=solid]; +"797 QuantizeLinear_bert/encoder/layer_2/attention/self/query/kernel^0_1" -> "798 DequantizeLinear_bert/encoder/layer_2/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"798 DequantizeLinear_bert/encoder/layer_2/attention/self/query/kernel^0_1" -> "799 bert/encoder/layer_2/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"799 bert/encoder/layer_2/attention/self/query/MatMul" -> "800 bert/encoder/layer_2/attention/self/query/BiasAdd" [label="[]", style=solid]; +"800 bert/encoder/layer_2/attention/self/query/BiasAdd" -> "801 QuantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"801 QuantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd^0_1" -> "802 DequantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"802 DequantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd^0_1" -> "803 bert/encoder/layer_2/attention/self/Reshape" [label="[]", style=solid]; +"803 bert/encoder/layer_2/attention/self/Reshape" -> "804 bert/encoder/layer_2/attention/self/transpose" [label="[]", style=solid]; +"804 bert/encoder/layer_2/attention/self/transpose" -> "814 bert/encoder/layer_2/attention/self/MatMul" [label="[]", style=solid]; +"805 QuantizeLinear_bert/encoder/layer_2/attention/self/key/kernel^0_1" -> "806 DequantizeLinear_bert/encoder/layer_2/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"806 DequantizeLinear_bert/encoder/layer_2/attention/self/key/kernel^0_1" -> "807 bert/encoder/layer_2/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"807 bert/encoder/layer_2/attention/self/key/MatMul" -> "808 bert/encoder/layer_2/attention/self/key/BiasAdd" [label="[]", style=solid]; +"808 bert/encoder/layer_2/attention/self/key/BiasAdd" -> "809 QuantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"809 QuantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd^0_1" -> "810 DequantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"810 DequantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd^0_1" -> "811 bert/encoder/layer_2/attention/self/Reshape_1" [label="[]", style=solid]; +"811 bert/encoder/layer_2/attention/self/Reshape_1" -> "812 bert/encoder/layer_2/attention/self/transpose_1" [label="[]", style=solid]; +"812 bert/encoder/layer_2/attention/self/transpose_1" -> "813 bert/encoder/layer_2/attention/self/MatMul__334" [label="[]", style=solid]; +"813 bert/encoder/layer_2/attention/self/MatMul__334" -> "814 bert/encoder/layer_2/attention/self/MatMul" [label="[]", style=solid]; +"814 bert/encoder/layer_2/attention/self/MatMul" -> "815 bert/encoder/layer_2/attention/self/Mul" [label="[]", style=solid]; +"815 bert/encoder/layer_2/attention/self/Mul" -> "816 bert/encoder/layer_2/attention/self/add" [label="[]", style=solid]; +"816 bert/encoder/layer_2/attention/self/add" -> "817 Shape_nncf_739" [label="[]", 
style=solid]; +"816 bert/encoder/layer_2/attention/self/add" -> "818 Flatten_nncf_740" [label="[]", style=solid]; +"817 Shape_nncf_739" -> "820 Reshape_nncf_742" [label="[-1]", style=dashed]; +"818 Flatten_nncf_740" -> "819 bert/encoder/layer_2/attention/self/Softmax" [label="[]", style=solid]; +"819 bert/encoder/layer_2/attention/self/Softmax" -> "820 Reshape_nncf_742" [label="[]", style=solid]; +"820 Reshape_nncf_742" -> "821 bert/encoder/layer_2/attention/self/MatMul_1" [label="[]", style=solid]; +"821 bert/encoder/layer_2/attention/self/MatMul_1" -> "822 QuantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"822 QuantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1^0_1" -> "823 DequantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"823 DequantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1^0_1" -> "824 bert/encoder/layer_2/attention/self/transpose_3" [label="[]", style=solid]; +"824 bert/encoder/layer_2/attention/self/transpose_3" -> "825 bert/encoder/layer_2/attention/self/Reshape_3" [label="[]", style=solid]; +"825 bert/encoder/layer_2/attention/self/Reshape_3" -> "828 bert/encoder/layer_2/attention/output/dense/MatMul" [label="[]", style=solid]; +"826 QuantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel^0_1" -> "827 DequantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"827 DequantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel^0_1" -> "828 bert/encoder/layer_2/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"828 bert/encoder/layer_2/attention/output/dense/MatMul" -> "829 bert/encoder/layer_2/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"829 bert/encoder/layer_2/attention/output/dense/BiasAdd" -> "830 bert/encoder/layer_2/attention/output/add" [label="[]", style=solid]; +"830 bert/encoder/layer_2/attention/output/add" -> "831 bert/encoder/layer_2/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"830 bert/encoder/layer_2/attention/output/add" -> "833 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"830 bert/encoder/layer_2/attention/output/add" -> "842 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"831 bert/encoder/layer_2/attention/output/LayerNorm/moments/mean" -> "832 bert/encoder/layer_2/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"831 bert/encoder/layer_2/attention/output/LayerNorm/moments/mean" -> "840 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"832 bert/encoder/layer_2/attention/output/LayerNorm/moments/StopGradient" -> "833 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"833 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference" -> "834 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference__337" [label="[]", style=solid]; +"834 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference__337" -> "835 bert/encoder/layer_2/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"835 bert/encoder/layer_2/attention/output/LayerNorm/moments/variance" -> "836 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"836 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add" -> "837 
bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"837 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/Rsqrt" -> "838 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/Rsqrt__339" [label="[]", style=solid]; +"838 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/Rsqrt__339" -> "839 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"839 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul" -> "840 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"839 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul" -> "842 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"840 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_2" -> "841 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"841 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/sub" -> "843 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"842 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_1" -> "843 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"843 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1" -> "844 QuantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"843 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1" -> "864 bert/encoder/layer_2/output/add" [label="[]", style=solid]; +"844 QuantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "845 DequantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"845 DequantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "848 bert/encoder/layer_2/intermediate/dense/MatMul" [label="[]", style=solid]; +"846 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel^0_1" -> "847 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"847 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel^0_1" -> "848 bert/encoder/layer_2/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"848 bert/encoder/layer_2/intermediate/dense/MatMul" -> "849 bert/encoder/layer_2/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"849 bert/encoder/layer_2/intermediate/dense/BiasAdd" -> "850 bert/encoder/layer_2/intermediate/dense/Pow" [label="[]", style=solid]; +"849 bert/encoder/layer_2/intermediate/dense/BiasAdd" -> "852 bert/encoder/layer_2/intermediate/dense/add" [label="[]", style=solid]; +"849 bert/encoder/layer_2/intermediate/dense/BiasAdd" -> "857 bert/encoder/layer_2/intermediate/dense/mul_3" [label="[]", style=solid]; +"850 bert/encoder/layer_2/intermediate/dense/Pow" -> "851 bert/encoder/layer_2/intermediate/dense/mul" [label="[]", style=solid]; +"851 bert/encoder/layer_2/intermediate/dense/mul" -> "852 bert/encoder/layer_2/intermediate/dense/add" [label="[]", style=solid]; +"852 bert/encoder/layer_2/intermediate/dense/add" -> "853 bert/encoder/layer_2/intermediate/dense/mul_1" [label="[]", style=solid]; +"853 bert/encoder/layer_2/intermediate/dense/mul_1" -> "854 bert/encoder/layer_2/intermediate/dense/Tanh" [label="[]", style=solid]; +"854 bert/encoder/layer_2/intermediate/dense/Tanh" -> "855 bert/encoder/layer_2/intermediate/dense/add_1" [label="[]", 
style=solid]; +"855 bert/encoder/layer_2/intermediate/dense/add_1" -> "856 bert/encoder/layer_2/intermediate/dense/mul_2" [label="[]", style=solid]; +"856 bert/encoder/layer_2/intermediate/dense/mul_2" -> "857 bert/encoder/layer_2/intermediate/dense/mul_3" [label="[]", style=solid]; +"857 bert/encoder/layer_2/intermediate/dense/mul_3" -> "858 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"858 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3^0_1" -> "859 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"859 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3^0_1" -> "862 bert/encoder/layer_2/output/dense/MatMul" [label="[]", style=solid]; +"860 QuantizeLinear_bert/encoder/layer_2/output/dense/kernel^0_1" -> "861 DequantizeLinear_bert/encoder/layer_2/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"861 DequantizeLinear_bert/encoder/layer_2/output/dense/kernel^0_1" -> "862 bert/encoder/layer_2/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"862 bert/encoder/layer_2/output/dense/MatMul" -> "863 bert/encoder/layer_2/output/dense/BiasAdd" [label="[]", style=solid]; +"863 bert/encoder/layer_2/output/dense/BiasAdd" -> "864 bert/encoder/layer_2/output/add" [label="[]", style=solid]; +"864 bert/encoder/layer_2/output/add" -> "865 bert/encoder/layer_2/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"864 bert/encoder/layer_2/output/add" -> "867 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"864 bert/encoder/layer_2/output/add" -> "876 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"865 bert/encoder/layer_2/output/LayerNorm/moments/mean" -> "866 bert/encoder/layer_2/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"865 bert/encoder/layer_2/output/LayerNorm/moments/mean" -> "874 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"866 bert/encoder/layer_2/output/LayerNorm/moments/StopGradient" -> "867 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"867 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference" -> "868 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference__341" [label="[]", style=solid]; +"868 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference__341" -> "869 bert/encoder/layer_2/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"869 bert/encoder/layer_2/output/LayerNorm/moments/variance" -> "870 bert/encoder/layer_2/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"870 bert/encoder/layer_2/output/LayerNorm/batchnorm/add" -> "871 bert/encoder/layer_2/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"871 bert/encoder/layer_2/output/LayerNorm/batchnorm/Rsqrt" -> "872 bert/encoder/layer_2/output/LayerNorm/batchnorm/Rsqrt__343" [label="[]", style=solid]; +"872 bert/encoder/layer_2/output/LayerNorm/batchnorm/Rsqrt__343" -> "873 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"873 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul" -> "874 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"873 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul" -> "876 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"874 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_2" -> "875 
bert/encoder/layer_2/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"875 bert/encoder/layer_2/output/LayerNorm/batchnorm/sub" -> "877 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"876 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_1" -> "877 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"877 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" -> "878 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"877 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" -> "882 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"877 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" -> "884 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"877 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" -> "923 bert/encoder/layer_3/attention/output/add" [label="[]", style=solid]; +"878 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_1" -> "879 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"879 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_1" -> "886 bert/encoder/layer_3/attention/self/value/MatMul" [label="[]", style=solid]; +"880 QuantizeLinear_bert/encoder/layer_3/attention/self/value/kernel^0_1" -> "881 DequantizeLinear_bert/encoder/layer_3/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"881 DequantizeLinear_bert/encoder/layer_3/attention/self/value/kernel^0_1" -> "886 bert/encoder/layer_3/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"882 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_2" -> "883 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"883 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_2" -> "892 bert/encoder/layer_3/attention/self/query/MatMul" [label="[]", style=solid]; +"884 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_3" -> "885 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"885 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_3" -> "900 bert/encoder/layer_3/attention/self/key/MatMul" [label="[]", style=solid]; +"886 bert/encoder/layer_3/attention/self/value/MatMul" -> "887 bert/encoder/layer_3/attention/self/value/BiasAdd" [label="[]", style=solid]; +"887 bert/encoder/layer_3/attention/self/value/BiasAdd" -> "888 bert/encoder/layer_3/attention/self/Reshape_2" [label="[]", style=solid]; +"888 bert/encoder/layer_3/attention/self/Reshape_2" -> "889 bert/encoder/layer_3/attention/self/transpose_2" [label="[]", style=solid]; +"889 bert/encoder/layer_3/attention/self/transpose_2" -> "914 bert/encoder/layer_3/attention/self/MatMul_1" [label="[]", style=solid]; +"890 QuantizeLinear_bert/encoder/layer_3/attention/self/query/kernel^0_1" -> "891 DequantizeLinear_bert/encoder/layer_3/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"891 DequantizeLinear_bert/encoder/layer_3/attention/self/query/kernel^0_1" -> "892 bert/encoder/layer_3/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"892 bert/encoder/layer_3/attention/self/query/MatMul" -> "893 bert/encoder/layer_3/attention/self/query/BiasAdd" [label="[]", style=solid]; +"893 
bert/encoder/layer_3/attention/self/query/BiasAdd" -> "894 QuantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"894 QuantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd^0_1" -> "895 DequantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"895 DequantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd^0_1" -> "896 bert/encoder/layer_3/attention/self/Reshape" [label="[]", style=solid]; +"896 bert/encoder/layer_3/attention/self/Reshape" -> "897 bert/encoder/layer_3/attention/self/transpose" [label="[]", style=solid]; +"897 bert/encoder/layer_3/attention/self/transpose" -> "907 bert/encoder/layer_3/attention/self/MatMul" [label="[]", style=solid]; +"898 QuantizeLinear_bert/encoder/layer_3/attention/self/key/kernel^0_1" -> "899 DequantizeLinear_bert/encoder/layer_3/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"899 DequantizeLinear_bert/encoder/layer_3/attention/self/key/kernel^0_1" -> "900 bert/encoder/layer_3/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"900 bert/encoder/layer_3/attention/self/key/MatMul" -> "901 bert/encoder/layer_3/attention/self/key/BiasAdd" [label="[]", style=solid]; +"901 bert/encoder/layer_3/attention/self/key/BiasAdd" -> "902 QuantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"902 QuantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd^0_1" -> "903 DequantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"903 DequantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd^0_1" -> "904 bert/encoder/layer_3/attention/self/Reshape_1" [label="[]", style=solid]; +"904 bert/encoder/layer_3/attention/self/Reshape_1" -> "905 bert/encoder/layer_3/attention/self/transpose_1" [label="[]", style=solid]; +"905 bert/encoder/layer_3/attention/self/transpose_1" -> "906 bert/encoder/layer_3/attention/self/MatMul__348" [label="[]", style=solid]; +"906 bert/encoder/layer_3/attention/self/MatMul__348" -> "907 bert/encoder/layer_3/attention/self/MatMul" [label="[]", style=solid]; +"907 bert/encoder/layer_3/attention/self/MatMul" -> "908 bert/encoder/layer_3/attention/self/Mul" [label="[]", style=solid]; +"908 bert/encoder/layer_3/attention/self/Mul" -> "909 bert/encoder/layer_3/attention/self/add" [label="[]", style=solid]; +"909 bert/encoder/layer_3/attention/self/add" -> "910 Shape_nncf_804" [label="[]", style=solid]; +"909 bert/encoder/layer_3/attention/self/add" -> "911 Flatten_nncf_805" [label="[]", style=solid]; +"910 Shape_nncf_804" -> "913 Reshape_nncf_807" [label="[-1]", style=dashed]; +"911 Flatten_nncf_805" -> "912 bert/encoder/layer_3/attention/self/Softmax" [label="[]", style=solid]; +"912 bert/encoder/layer_3/attention/self/Softmax" -> "913 Reshape_nncf_807" [label="[]", style=solid]; +"913 Reshape_nncf_807" -> "914 bert/encoder/layer_3/attention/self/MatMul_1" [label="[]", style=solid]; +"914 bert/encoder/layer_3/attention/self/MatMul_1" -> "915 QuantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"915 QuantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1^0_1" -> "916 DequantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"916 DequantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1^0_1" -> "917 bert/encoder/layer_3/attention/self/transpose_3" [label="[]", style=solid]; +"917 bert/encoder/layer_3/attention/self/transpose_3" -> "918 
bert/encoder/layer_3/attention/self/Reshape_3" [label="[]", style=solid]; +"918 bert/encoder/layer_3/attention/self/Reshape_3" -> "921 bert/encoder/layer_3/attention/output/dense/MatMul" [label="[]", style=solid]; +"919 QuantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel^0_1" -> "920 DequantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"920 DequantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel^0_1" -> "921 bert/encoder/layer_3/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"921 bert/encoder/layer_3/attention/output/dense/MatMul" -> "922 bert/encoder/layer_3/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"922 bert/encoder/layer_3/attention/output/dense/BiasAdd" -> "923 bert/encoder/layer_3/attention/output/add" [label="[]", style=solid]; +"923 bert/encoder/layer_3/attention/output/add" -> "924 bert/encoder/layer_3/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"923 bert/encoder/layer_3/attention/output/add" -> "926 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"923 bert/encoder/layer_3/attention/output/add" -> "935 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"924 bert/encoder/layer_3/attention/output/LayerNorm/moments/mean" -> "925 bert/encoder/layer_3/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"924 bert/encoder/layer_3/attention/output/LayerNorm/moments/mean" -> "933 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"925 bert/encoder/layer_3/attention/output/LayerNorm/moments/StopGradient" -> "926 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"926 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference" -> "927 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference__351" [label="[]", style=solid]; +"927 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference__351" -> "928 bert/encoder/layer_3/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"928 bert/encoder/layer_3/attention/output/LayerNorm/moments/variance" -> "929 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"929 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add" -> "930 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"930 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/Rsqrt" -> "931 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/Rsqrt__353" [label="[]", style=solid]; +"931 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/Rsqrt__353" -> "932 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"932 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul" -> "933 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"932 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul" -> "935 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"933 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_2" -> "934 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"934 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/sub" -> "936 
bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"935 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_1" -> "936 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"936 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1" -> "937 QuantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"936 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1" -> "957 bert/encoder/layer_3/output/add" [label="[]", style=solid]; +"937 QuantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "938 DequantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"938 DequantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "941 bert/encoder/layer_3/intermediate/dense/MatMul" [label="[]", style=solid]; +"939 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel^0_1" -> "940 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"940 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel^0_1" -> "941 bert/encoder/layer_3/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"941 bert/encoder/layer_3/intermediate/dense/MatMul" -> "942 bert/encoder/layer_3/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"942 bert/encoder/layer_3/intermediate/dense/BiasAdd" -> "943 bert/encoder/layer_3/intermediate/dense/Pow" [label="[]", style=solid]; +"942 bert/encoder/layer_3/intermediate/dense/BiasAdd" -> "945 bert/encoder/layer_3/intermediate/dense/add" [label="[]", style=solid]; +"942 bert/encoder/layer_3/intermediate/dense/BiasAdd" -> "950 bert/encoder/layer_3/intermediate/dense/mul_3" [label="[]", style=solid]; +"943 bert/encoder/layer_3/intermediate/dense/Pow" -> "944 bert/encoder/layer_3/intermediate/dense/mul" [label="[]", style=solid]; +"944 bert/encoder/layer_3/intermediate/dense/mul" -> "945 bert/encoder/layer_3/intermediate/dense/add" [label="[]", style=solid]; +"945 bert/encoder/layer_3/intermediate/dense/add" -> "946 bert/encoder/layer_3/intermediate/dense/mul_1" [label="[]", style=solid]; +"946 bert/encoder/layer_3/intermediate/dense/mul_1" -> "947 bert/encoder/layer_3/intermediate/dense/Tanh" [label="[]", style=solid]; +"947 bert/encoder/layer_3/intermediate/dense/Tanh" -> "948 bert/encoder/layer_3/intermediate/dense/add_1" [label="[]", style=solid]; +"948 bert/encoder/layer_3/intermediate/dense/add_1" -> "949 bert/encoder/layer_3/intermediate/dense/mul_2" [label="[]", style=solid]; +"949 bert/encoder/layer_3/intermediate/dense/mul_2" -> "950 bert/encoder/layer_3/intermediate/dense/mul_3" [label="[]", style=solid]; +"950 bert/encoder/layer_3/intermediate/dense/mul_3" -> "951 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"951 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3^0_1" -> "952 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"952 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3^0_1" -> "955 bert/encoder/layer_3/output/dense/MatMul" [label="[]", style=solid]; +"953 QuantizeLinear_bert/encoder/layer_3/output/dense/kernel^0_1" -> "954 DequantizeLinear_bert/encoder/layer_3/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"954 DequantizeLinear_bert/encoder/layer_3/output/dense/kernel^0_1" -> "955 
bert/encoder/layer_3/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"955 bert/encoder/layer_3/output/dense/MatMul" -> "956 bert/encoder/layer_3/output/dense/BiasAdd" [label="[]", style=solid]; +"956 bert/encoder/layer_3/output/dense/BiasAdd" -> "957 bert/encoder/layer_3/output/add" [label="[]", style=solid]; +"957 bert/encoder/layer_3/output/add" -> "958 bert/encoder/layer_3/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"957 bert/encoder/layer_3/output/add" -> "960 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"957 bert/encoder/layer_3/output/add" -> "969 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"958 bert/encoder/layer_3/output/LayerNorm/moments/mean" -> "959 bert/encoder/layer_3/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"958 bert/encoder/layer_3/output/LayerNorm/moments/mean" -> "967 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"959 bert/encoder/layer_3/output/LayerNorm/moments/StopGradient" -> "960 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"960 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference" -> "961 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference__355" [label="[]", style=solid]; +"961 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference__355" -> "962 bert/encoder/layer_3/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"962 bert/encoder/layer_3/output/LayerNorm/moments/variance" -> "963 bert/encoder/layer_3/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"963 bert/encoder/layer_3/output/LayerNorm/batchnorm/add" -> "964 bert/encoder/layer_3/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"964 bert/encoder/layer_3/output/LayerNorm/batchnorm/Rsqrt" -> "965 bert/encoder/layer_3/output/LayerNorm/batchnorm/Rsqrt__357" [label="[]", style=solid]; +"965 bert/encoder/layer_3/output/LayerNorm/batchnorm/Rsqrt__357" -> "966 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"966 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul" -> "967 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"966 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul" -> "969 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"967 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_2" -> "968 bert/encoder/layer_3/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"968 bert/encoder/layer_3/output/LayerNorm/batchnorm/sub" -> "970 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"969 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_1" -> "970 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"970 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" -> "971 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"970 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" -> "975 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"970 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" -> "977 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"970 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" -> "1016 bert/encoder/layer_4/attention/output/add" [label="[]", style=solid]; +"971 
QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_1" -> "972 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"972 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_1" -> "979 bert/encoder/layer_4/attention/self/value/MatMul" [label="[]", style=solid]; +"973 QuantizeLinear_bert/encoder/layer_4/attention/self/value/kernel^0_1" -> "974 DequantizeLinear_bert/encoder/layer_4/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"974 DequantizeLinear_bert/encoder/layer_4/attention/self/value/kernel^0_1" -> "979 bert/encoder/layer_4/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"975 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_2" -> "976 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"976 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_2" -> "985 bert/encoder/layer_4/attention/self/query/MatMul" [label="[]", style=solid]; +"977 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_3" -> "978 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"978 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_3" -> "993 bert/encoder/layer_4/attention/self/key/MatMul" [label="[]", style=solid]; +"979 bert/encoder/layer_4/attention/self/value/MatMul" -> "980 bert/encoder/layer_4/attention/self/value/BiasAdd" [label="[]", style=solid]; +"980 bert/encoder/layer_4/attention/self/value/BiasAdd" -> "981 bert/encoder/layer_4/attention/self/Reshape_2" [label="[]", style=solid]; +"981 bert/encoder/layer_4/attention/self/Reshape_2" -> "982 bert/encoder/layer_4/attention/self/transpose_2" [label="[]", style=solid]; +"982 bert/encoder/layer_4/attention/self/transpose_2" -> "1007 bert/encoder/layer_4/attention/self/MatMul_1" [label="[]", style=solid]; +"983 QuantizeLinear_bert/encoder/layer_4/attention/self/query/kernel^0_1" -> "984 DequantizeLinear_bert/encoder/layer_4/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"984 DequantizeLinear_bert/encoder/layer_4/attention/self/query/kernel^0_1" -> "985 bert/encoder/layer_4/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"985 bert/encoder/layer_4/attention/self/query/MatMul" -> "986 bert/encoder/layer_4/attention/self/query/BiasAdd" [label="[]", style=solid]; +"986 bert/encoder/layer_4/attention/self/query/BiasAdd" -> "987 QuantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"987 QuantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd^0_1" -> "988 DequantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"988 DequantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd^0_1" -> "989 bert/encoder/layer_4/attention/self/Reshape" [label="[]", style=solid]; +"989 bert/encoder/layer_4/attention/self/Reshape" -> "990 bert/encoder/layer_4/attention/self/transpose" [label="[]", style=solid]; +"990 bert/encoder/layer_4/attention/self/transpose" -> "1000 bert/encoder/layer_4/attention/self/MatMul" [label="[]", style=solid]; +"991 QuantizeLinear_bert/encoder/layer_4/attention/self/key/kernel^0_1" -> "992 DequantizeLinear_bert/encoder/layer_4/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"992 DequantizeLinear_bert/encoder/layer_4/attention/self/key/kernel^0_1" -> "993 
bert/encoder/layer_4/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"993 bert/encoder/layer_4/attention/self/key/MatMul" -> "994 bert/encoder/layer_4/attention/self/key/BiasAdd" [label="[]", style=solid]; +"994 bert/encoder/layer_4/attention/self/key/BiasAdd" -> "995 QuantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"995 QuantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd^0_1" -> "996 DequantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"996 DequantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd^0_1" -> "997 bert/encoder/layer_4/attention/self/Reshape_1" [label="[]", style=solid]; +"997 bert/encoder/layer_4/attention/self/Reshape_1" -> "998 bert/encoder/layer_4/attention/self/transpose_1" [label="[]", style=solid]; +"998 bert/encoder/layer_4/attention/self/transpose_1" -> "999 bert/encoder/layer_4/attention/self/MatMul__362" [label="[]", style=solid]; +"999 bert/encoder/layer_4/attention/self/MatMul__362" -> "1000 bert/encoder/layer_4/attention/self/MatMul" [label="[]", style=solid]; +"1000 bert/encoder/layer_4/attention/self/MatMul" -> "1001 bert/encoder/layer_4/attention/self/Mul" [label="[]", style=solid]; +"1001 bert/encoder/layer_4/attention/self/Mul" -> "1002 bert/encoder/layer_4/attention/self/add" [label="[]", style=solid]; +"1002 bert/encoder/layer_4/attention/self/add" -> "1003 Shape_nncf_869" [label="[]", style=solid]; +"1002 bert/encoder/layer_4/attention/self/add" -> "1004 Flatten_nncf_870" [label="[]", style=solid]; +"1003 Shape_nncf_869" -> "1006 Reshape_nncf_872" [label="[-1]", style=dashed]; +"1004 Flatten_nncf_870" -> "1005 bert/encoder/layer_4/attention/self/Softmax" [label="[]", style=solid]; +"1005 bert/encoder/layer_4/attention/self/Softmax" -> "1006 Reshape_nncf_872" [label="[]", style=solid]; +"1006 Reshape_nncf_872" -> "1007 bert/encoder/layer_4/attention/self/MatMul_1" [label="[]", style=solid]; +"1007 bert/encoder/layer_4/attention/self/MatMul_1" -> "1008 QuantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"1008 QuantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1^0_1" -> "1009 DequantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"1009 DequantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1^0_1" -> "1010 bert/encoder/layer_4/attention/self/transpose_3" [label="[]", style=solid]; +"1010 bert/encoder/layer_4/attention/self/transpose_3" -> "1011 bert/encoder/layer_4/attention/self/Reshape_3" [label="[]", style=solid]; +"1011 bert/encoder/layer_4/attention/self/Reshape_3" -> "1014 bert/encoder/layer_4/attention/output/dense/MatMul" [label="[]", style=solid]; +"1012 QuantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel^0_1" -> "1013 DequantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"1013 DequantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel^0_1" -> "1014 bert/encoder/layer_4/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"1014 bert/encoder/layer_4/attention/output/dense/MatMul" -> "1015 bert/encoder/layer_4/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"1015 bert/encoder/layer_4/attention/output/dense/BiasAdd" -> "1016 bert/encoder/layer_4/attention/output/add" [label="[]", style=solid]; +"1016 bert/encoder/layer_4/attention/output/add" -> "1017 bert/encoder/layer_4/attention/output/LayerNorm/moments/mean" [label="[]", 
style=solid]; +"1016 bert/encoder/layer_4/attention/output/add" -> "1019 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1016 bert/encoder/layer_4/attention/output/add" -> "1028 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1017 bert/encoder/layer_4/attention/output/LayerNorm/moments/mean" -> "1018 bert/encoder/layer_4/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1017 bert/encoder/layer_4/attention/output/LayerNorm/moments/mean" -> "1026 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1018 bert/encoder/layer_4/attention/output/LayerNorm/moments/StopGradient" -> "1019 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1019 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference" -> "1020 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference__365" [label="[]", style=solid]; +"1020 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference__365" -> "1021 bert/encoder/layer_4/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1021 bert/encoder/layer_4/attention/output/LayerNorm/moments/variance" -> "1022 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1022 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add" -> "1023 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1023 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/Rsqrt" -> "1024 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/Rsqrt__367" [label="[]", style=solid]; +"1024 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/Rsqrt__367" -> "1025 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1025 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul" -> "1026 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1025 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul" -> "1028 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1026 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_2" -> "1027 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1027 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/sub" -> "1029 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1028 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_1" -> "1029 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1029 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1" -> "1030 QuantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1029 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1" -> "1050 bert/encoder/layer_4/output/add" [label="[]", style=solid]; +"1030 QuantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1031 DequantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1031 DequantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1034 bert/encoder/layer_4/intermediate/dense/MatMul" [label="[]", style=solid]; +"1032 
QuantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel^0_1" -> "1033 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"1033 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel^0_1" -> "1034 bert/encoder/layer_4/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"1034 bert/encoder/layer_4/intermediate/dense/MatMul" -> "1035 bert/encoder/layer_4/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"1035 bert/encoder/layer_4/intermediate/dense/BiasAdd" -> "1036 bert/encoder/layer_4/intermediate/dense/Pow" [label="[]", style=solid]; +"1035 bert/encoder/layer_4/intermediate/dense/BiasAdd" -> "1038 bert/encoder/layer_4/intermediate/dense/add" [label="[]", style=solid]; +"1035 bert/encoder/layer_4/intermediate/dense/BiasAdd" -> "1043 bert/encoder/layer_4/intermediate/dense/mul_3" [label="[]", style=solid]; +"1036 bert/encoder/layer_4/intermediate/dense/Pow" -> "1037 bert/encoder/layer_4/intermediate/dense/mul" [label="[]", style=solid]; +"1037 bert/encoder/layer_4/intermediate/dense/mul" -> "1038 bert/encoder/layer_4/intermediate/dense/add" [label="[]", style=solid]; +"1038 bert/encoder/layer_4/intermediate/dense/add" -> "1039 bert/encoder/layer_4/intermediate/dense/mul_1" [label="[]", style=solid]; +"1039 bert/encoder/layer_4/intermediate/dense/mul_1" -> "1040 bert/encoder/layer_4/intermediate/dense/Tanh" [label="[]", style=solid]; +"1040 bert/encoder/layer_4/intermediate/dense/Tanh" -> "1041 bert/encoder/layer_4/intermediate/dense/add_1" [label="[]", style=solid]; +"1041 bert/encoder/layer_4/intermediate/dense/add_1" -> "1042 bert/encoder/layer_4/intermediate/dense/mul_2" [label="[]", style=solid]; +"1042 bert/encoder/layer_4/intermediate/dense/mul_2" -> "1043 bert/encoder/layer_4/intermediate/dense/mul_3" [label="[]", style=solid]; +"1043 bert/encoder/layer_4/intermediate/dense/mul_3" -> "1044 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"1044 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3^0_1" -> "1045 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"1045 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3^0_1" -> "1048 bert/encoder/layer_4/output/dense/MatMul" [label="[]", style=solid]; +"1046 QuantizeLinear_bert/encoder/layer_4/output/dense/kernel^0_1" -> "1047 DequantizeLinear_bert/encoder/layer_4/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"1047 DequantizeLinear_bert/encoder/layer_4/output/dense/kernel^0_1" -> "1048 bert/encoder/layer_4/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"1048 bert/encoder/layer_4/output/dense/MatMul" -> "1049 bert/encoder/layer_4/output/dense/BiasAdd" [label="[]", style=solid]; +"1049 bert/encoder/layer_4/output/dense/BiasAdd" -> "1050 bert/encoder/layer_4/output/add" [label="[]", style=solid]; +"1050 bert/encoder/layer_4/output/add" -> "1051 bert/encoder/layer_4/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1050 bert/encoder/layer_4/output/add" -> "1053 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1050 bert/encoder/layer_4/output/add" -> "1062 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1051 bert/encoder/layer_4/output/LayerNorm/moments/mean" -> "1052 bert/encoder/layer_4/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1051 bert/encoder/layer_4/output/LayerNorm/moments/mean" -> "1060 
bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1052 bert/encoder/layer_4/output/LayerNorm/moments/StopGradient" -> "1053 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1053 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference" -> "1054 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference__369" [label="[]", style=solid]; +"1054 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference__369" -> "1055 bert/encoder/layer_4/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1055 bert/encoder/layer_4/output/LayerNorm/moments/variance" -> "1056 bert/encoder/layer_4/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1056 bert/encoder/layer_4/output/LayerNorm/batchnorm/add" -> "1057 bert/encoder/layer_4/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1057 bert/encoder/layer_4/output/LayerNorm/batchnorm/Rsqrt" -> "1058 bert/encoder/layer_4/output/LayerNorm/batchnorm/Rsqrt__371" [label="[]", style=solid]; +"1058 bert/encoder/layer_4/output/LayerNorm/batchnorm/Rsqrt__371" -> "1059 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1059 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul" -> "1060 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1059 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul" -> "1062 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1060 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_2" -> "1061 bert/encoder/layer_4/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1061 bert/encoder/layer_4/output/LayerNorm/batchnorm/sub" -> "1063 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1062 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_1" -> "1063 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1063 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" -> "1064 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1063 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" -> "1068 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"1063 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" -> "1070 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"1063 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" -> "1109 bert/encoder/layer_5/attention/output/add" [label="[]", style=solid]; +"1064 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_1" -> "1065 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1065 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_1" -> "1072 bert/encoder/layer_5/attention/self/value/MatMul" [label="[]", style=solid]; +"1066 QuantizeLinear_bert/encoder/layer_5/attention/self/value/kernel^0_1" -> "1067 DequantizeLinear_bert/encoder/layer_5/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"1067 DequantizeLinear_bert/encoder/layer_5/attention/self/value/kernel^0_1" -> "1072 bert/encoder/layer_5/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"1068 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_2" -> "1069 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; 
+"1069 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_2" -> "1078 bert/encoder/layer_5/attention/self/query/MatMul" [label="[]", style=solid]; +"1070 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_3" -> "1071 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"1071 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_3" -> "1086 bert/encoder/layer_5/attention/self/key/MatMul" [label="[]", style=solid]; +"1072 bert/encoder/layer_5/attention/self/value/MatMul" -> "1073 bert/encoder/layer_5/attention/self/value/BiasAdd" [label="[]", style=solid]; +"1073 bert/encoder/layer_5/attention/self/value/BiasAdd" -> "1074 bert/encoder/layer_5/attention/self/Reshape_2" [label="[]", style=solid]; +"1074 bert/encoder/layer_5/attention/self/Reshape_2" -> "1075 bert/encoder/layer_5/attention/self/transpose_2" [label="[]", style=solid]; +"1075 bert/encoder/layer_5/attention/self/transpose_2" -> "1100 bert/encoder/layer_5/attention/self/MatMul_1" [label="[]", style=solid]; +"1076 QuantizeLinear_bert/encoder/layer_5/attention/self/query/kernel^0_1" -> "1077 DequantizeLinear_bert/encoder/layer_5/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"1077 DequantizeLinear_bert/encoder/layer_5/attention/self/query/kernel^0_1" -> "1078 bert/encoder/layer_5/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"1078 bert/encoder/layer_5/attention/self/query/MatMul" -> "1079 bert/encoder/layer_5/attention/self/query/BiasAdd" [label="[]", style=solid]; +"1079 bert/encoder/layer_5/attention/self/query/BiasAdd" -> "1080 QuantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"1080 QuantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd^0_1" -> "1081 DequantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"1081 DequantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd^0_1" -> "1082 bert/encoder/layer_5/attention/self/Reshape" [label="[]", style=solid]; +"1082 bert/encoder/layer_5/attention/self/Reshape" -> "1083 bert/encoder/layer_5/attention/self/transpose" [label="[]", style=solid]; +"1083 bert/encoder/layer_5/attention/self/transpose" -> "1093 bert/encoder/layer_5/attention/self/MatMul" [label="[]", style=solid]; +"1084 QuantizeLinear_bert/encoder/layer_5/attention/self/key/kernel^0_1" -> "1085 DequantizeLinear_bert/encoder/layer_5/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"1085 DequantizeLinear_bert/encoder/layer_5/attention/self/key/kernel^0_1" -> "1086 bert/encoder/layer_5/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"1086 bert/encoder/layer_5/attention/self/key/MatMul" -> "1087 bert/encoder/layer_5/attention/self/key/BiasAdd" [label="[]", style=solid]; +"1087 bert/encoder/layer_5/attention/self/key/BiasAdd" -> "1088 QuantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"1088 QuantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd^0_1" -> "1089 DequantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"1089 DequantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd^0_1" -> "1090 bert/encoder/layer_5/attention/self/Reshape_1" [label="[]", style=solid]; +"1090 bert/encoder/layer_5/attention/self/Reshape_1" -> "1091 bert/encoder/layer_5/attention/self/transpose_1" [label="[]", style=solid]; +"1091 
bert/encoder/layer_5/attention/self/transpose_1" -> "1092 bert/encoder/layer_5/attention/self/MatMul__376" [label="[]", style=solid]; +"1092 bert/encoder/layer_5/attention/self/MatMul__376" -> "1093 bert/encoder/layer_5/attention/self/MatMul" [label="[]", style=solid]; +"1093 bert/encoder/layer_5/attention/self/MatMul" -> "1094 bert/encoder/layer_5/attention/self/Mul" [label="[]", style=solid]; +"1094 bert/encoder/layer_5/attention/self/Mul" -> "1095 bert/encoder/layer_5/attention/self/add" [label="[]", style=solid]; +"1095 bert/encoder/layer_5/attention/self/add" -> "1096 Shape_nncf_934" [label="[]", style=solid]; +"1095 bert/encoder/layer_5/attention/self/add" -> "1097 Flatten_nncf_935" [label="[]", style=solid]; +"1096 Shape_nncf_934" -> "1099 Reshape_nncf_937" [label="[-1]", style=dashed]; +"1097 Flatten_nncf_935" -> "1098 bert/encoder/layer_5/attention/self/Softmax" [label="[]", style=solid]; +"1098 bert/encoder/layer_5/attention/self/Softmax" -> "1099 Reshape_nncf_937" [label="[]", style=solid]; +"1099 Reshape_nncf_937" -> "1100 bert/encoder/layer_5/attention/self/MatMul_1" [label="[]", style=solid]; +"1100 bert/encoder/layer_5/attention/self/MatMul_1" -> "1101 QuantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"1101 QuantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1^0_1" -> "1102 DequantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"1102 DequantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1^0_1" -> "1103 bert/encoder/layer_5/attention/self/transpose_3" [label="[]", style=solid]; +"1103 bert/encoder/layer_5/attention/self/transpose_3" -> "1104 bert/encoder/layer_5/attention/self/Reshape_3" [label="[]", style=solid]; +"1104 bert/encoder/layer_5/attention/self/Reshape_3" -> "1107 bert/encoder/layer_5/attention/output/dense/MatMul" [label="[]", style=solid]; +"1105 QuantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel^0_1" -> "1106 DequantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"1106 DequantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel^0_1" -> "1107 bert/encoder/layer_5/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"1107 bert/encoder/layer_5/attention/output/dense/MatMul" -> "1108 bert/encoder/layer_5/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"1108 bert/encoder/layer_5/attention/output/dense/BiasAdd" -> "1109 bert/encoder/layer_5/attention/output/add" [label="[]", style=solid]; +"1109 bert/encoder/layer_5/attention/output/add" -> "1110 bert/encoder/layer_5/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1109 bert/encoder/layer_5/attention/output/add" -> "1112 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1109 bert/encoder/layer_5/attention/output/add" -> "1121 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1110 bert/encoder/layer_5/attention/output/LayerNorm/moments/mean" -> "1111 bert/encoder/layer_5/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1110 bert/encoder/layer_5/attention/output/LayerNorm/moments/mean" -> "1119 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1111 bert/encoder/layer_5/attention/output/LayerNorm/moments/StopGradient" -> "1112 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1112 
bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference" -> "1113 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference__379" [label="[]", style=solid]; +"1113 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference__379" -> "1114 bert/encoder/layer_5/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1114 bert/encoder/layer_5/attention/output/LayerNorm/moments/variance" -> "1115 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1115 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add" -> "1116 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1116 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/Rsqrt" -> "1117 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/Rsqrt__381" [label="[]", style=solid]; +"1117 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/Rsqrt__381" -> "1118 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1118 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul" -> "1119 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1118 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul" -> "1121 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1119 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_2" -> "1120 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1120 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/sub" -> "1122 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1121 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_1" -> "1122 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1122 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1" -> "1123 QuantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1122 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1" -> "1143 bert/encoder/layer_5/output/add" [label="[]", style=solid]; +"1123 QuantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1124 DequantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1124 DequantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1127 bert/encoder/layer_5/intermediate/dense/MatMul" [label="[]", style=solid]; +"1125 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel^0_1" -> "1126 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"1126 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel^0_1" -> "1127 bert/encoder/layer_5/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"1127 bert/encoder/layer_5/intermediate/dense/MatMul" -> "1128 bert/encoder/layer_5/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"1128 bert/encoder/layer_5/intermediate/dense/BiasAdd" -> "1129 bert/encoder/layer_5/intermediate/dense/Pow" [label="[]", style=solid]; +"1128 bert/encoder/layer_5/intermediate/dense/BiasAdd" -> "1131 bert/encoder/layer_5/intermediate/dense/add" [label="[]", style=solid]; +"1128 bert/encoder/layer_5/intermediate/dense/BiasAdd" -> "1136 bert/encoder/layer_5/intermediate/dense/mul_3" [label="[]", 
style=solid]; +"1129 bert/encoder/layer_5/intermediate/dense/Pow" -> "1130 bert/encoder/layer_5/intermediate/dense/mul" [label="[]", style=solid]; +"1130 bert/encoder/layer_5/intermediate/dense/mul" -> "1131 bert/encoder/layer_5/intermediate/dense/add" [label="[]", style=solid]; +"1131 bert/encoder/layer_5/intermediate/dense/add" -> "1132 bert/encoder/layer_5/intermediate/dense/mul_1" [label="[]", style=solid]; +"1132 bert/encoder/layer_5/intermediate/dense/mul_1" -> "1133 bert/encoder/layer_5/intermediate/dense/Tanh" [label="[]", style=solid]; +"1133 bert/encoder/layer_5/intermediate/dense/Tanh" -> "1134 bert/encoder/layer_5/intermediate/dense/add_1" [label="[]", style=solid]; +"1134 bert/encoder/layer_5/intermediate/dense/add_1" -> "1135 bert/encoder/layer_5/intermediate/dense/mul_2" [label="[]", style=solid]; +"1135 bert/encoder/layer_5/intermediate/dense/mul_2" -> "1136 bert/encoder/layer_5/intermediate/dense/mul_3" [label="[]", style=solid]; +"1136 bert/encoder/layer_5/intermediate/dense/mul_3" -> "1137 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"1137 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3^0_1" -> "1138 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"1138 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3^0_1" -> "1141 bert/encoder/layer_5/output/dense/MatMul" [label="[]", style=solid]; +"1139 QuantizeLinear_bert/encoder/layer_5/output/dense/kernel^0_1" -> "1140 DequantizeLinear_bert/encoder/layer_5/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"1140 DequantizeLinear_bert/encoder/layer_5/output/dense/kernel^0_1" -> "1141 bert/encoder/layer_5/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"1141 bert/encoder/layer_5/output/dense/MatMul" -> "1142 bert/encoder/layer_5/output/dense/BiasAdd" [label="[]", style=solid]; +"1142 bert/encoder/layer_5/output/dense/BiasAdd" -> "1143 bert/encoder/layer_5/output/add" [label="[]", style=solid]; +"1143 bert/encoder/layer_5/output/add" -> "1144 bert/encoder/layer_5/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1143 bert/encoder/layer_5/output/add" -> "1146 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1143 bert/encoder/layer_5/output/add" -> "1155 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1144 bert/encoder/layer_5/output/LayerNorm/moments/mean" -> "1145 bert/encoder/layer_5/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1144 bert/encoder/layer_5/output/LayerNorm/moments/mean" -> "1153 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1145 bert/encoder/layer_5/output/LayerNorm/moments/StopGradient" -> "1146 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1146 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference" -> "1147 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference__383" [label="[]", style=solid]; +"1147 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference__383" -> "1148 bert/encoder/layer_5/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1148 bert/encoder/layer_5/output/LayerNorm/moments/variance" -> "1149 bert/encoder/layer_5/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1149 bert/encoder/layer_5/output/LayerNorm/batchnorm/add" -> "1150 bert/encoder/layer_5/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; 
+"1150 bert/encoder/layer_5/output/LayerNorm/batchnorm/Rsqrt" -> "1151 bert/encoder/layer_5/output/LayerNorm/batchnorm/Rsqrt__385" [label="[]", style=solid]; +"1151 bert/encoder/layer_5/output/LayerNorm/batchnorm/Rsqrt__385" -> "1152 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1152 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul" -> "1153 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1152 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul" -> "1155 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1153 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_2" -> "1154 bert/encoder/layer_5/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1154 bert/encoder/layer_5/output/LayerNorm/batchnorm/sub" -> "1156 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1155 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_1" -> "1156 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1156 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" -> "1157 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1156 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" -> "1161 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"1156 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" -> "1163 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"1156 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" -> "1202 bert/encoder/layer_6/attention/output/add" [label="[]", style=solid]; +"1157 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_1" -> "1158 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1158 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_1" -> "1165 bert/encoder/layer_6/attention/self/value/MatMul" [label="[]", style=solid]; +"1159 QuantizeLinear_bert/encoder/layer_6/attention/self/value/kernel^0_1" -> "1160 DequantizeLinear_bert/encoder/layer_6/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"1160 DequantizeLinear_bert/encoder/layer_6/attention/self/value/kernel^0_1" -> "1165 bert/encoder/layer_6/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"1161 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_2" -> "1162 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"1162 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_2" -> "1171 bert/encoder/layer_6/attention/self/query/MatMul" [label="[]", style=solid]; +"1163 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_3" -> "1164 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"1164 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_3" -> "1179 bert/encoder/layer_6/attention/self/key/MatMul" [label="[]", style=solid]; +"1165 bert/encoder/layer_6/attention/self/value/MatMul" -> "1166 bert/encoder/layer_6/attention/self/value/BiasAdd" [label="[]", style=solid]; +"1166 bert/encoder/layer_6/attention/self/value/BiasAdd" -> "1167 bert/encoder/layer_6/attention/self/Reshape_2" [label="[]", style=solid]; +"1167 bert/encoder/layer_6/attention/self/Reshape_2" -> "1168 
bert/encoder/layer_6/attention/self/transpose_2" [label="[]", style=solid]; +"1168 bert/encoder/layer_6/attention/self/transpose_2" -> "1193 bert/encoder/layer_6/attention/self/MatMul_1" [label="[]", style=solid]; +"1169 QuantizeLinear_bert/encoder/layer_6/attention/self/query/kernel^0_1" -> "1170 DequantizeLinear_bert/encoder/layer_6/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"1170 DequantizeLinear_bert/encoder/layer_6/attention/self/query/kernel^0_1" -> "1171 bert/encoder/layer_6/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"1171 bert/encoder/layer_6/attention/self/query/MatMul" -> "1172 bert/encoder/layer_6/attention/self/query/BiasAdd" [label="[]", style=solid]; +"1172 bert/encoder/layer_6/attention/self/query/BiasAdd" -> "1173 QuantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"1173 QuantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd^0_1" -> "1174 DequantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"1174 DequantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd^0_1" -> "1175 bert/encoder/layer_6/attention/self/Reshape" [label="[]", style=solid]; +"1175 bert/encoder/layer_6/attention/self/Reshape" -> "1176 bert/encoder/layer_6/attention/self/transpose" [label="[]", style=solid]; +"1176 bert/encoder/layer_6/attention/self/transpose" -> "1186 bert/encoder/layer_6/attention/self/MatMul" [label="[]", style=solid]; +"1177 QuantizeLinear_bert/encoder/layer_6/attention/self/key/kernel^0_1" -> "1178 DequantizeLinear_bert/encoder/layer_6/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"1178 DequantizeLinear_bert/encoder/layer_6/attention/self/key/kernel^0_1" -> "1179 bert/encoder/layer_6/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"1179 bert/encoder/layer_6/attention/self/key/MatMul" -> "1180 bert/encoder/layer_6/attention/self/key/BiasAdd" [label="[]", style=solid]; +"1180 bert/encoder/layer_6/attention/self/key/BiasAdd" -> "1181 QuantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"1181 QuantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd^0_1" -> "1182 DequantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"1182 DequantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd^0_1" -> "1183 bert/encoder/layer_6/attention/self/Reshape_1" [label="[]", style=solid]; +"1183 bert/encoder/layer_6/attention/self/Reshape_1" -> "1184 bert/encoder/layer_6/attention/self/transpose_1" [label="[]", style=solid]; +"1184 bert/encoder/layer_6/attention/self/transpose_1" -> "1185 bert/encoder/layer_6/attention/self/MatMul__390" [label="[]", style=solid]; +"1185 bert/encoder/layer_6/attention/self/MatMul__390" -> "1186 bert/encoder/layer_6/attention/self/MatMul" [label="[]", style=solid]; +"1186 bert/encoder/layer_6/attention/self/MatMul" -> "1187 bert/encoder/layer_6/attention/self/Mul" [label="[]", style=solid]; +"1187 bert/encoder/layer_6/attention/self/Mul" -> "1188 bert/encoder/layer_6/attention/self/add" [label="[]", style=solid]; +"1188 bert/encoder/layer_6/attention/self/add" -> "1189 Shape_nncf_999" [label="[]", style=solid]; +"1188 bert/encoder/layer_6/attention/self/add" -> "1190 Flatten_nncf_1000" [label="[]", style=solid]; +"1189 Shape_nncf_999" -> "1192 Reshape_nncf_1002" [label="[-1]", style=dashed]; +"1190 Flatten_nncf_1000" -> "1191 bert/encoder/layer_6/attention/self/Softmax" [label="[]", 
style=solid]; +"1191 bert/encoder/layer_6/attention/self/Softmax" -> "1192 Reshape_nncf_1002" [label="[]", style=solid]; +"1192 Reshape_nncf_1002" -> "1193 bert/encoder/layer_6/attention/self/MatMul_1" [label="[]", style=solid]; +"1193 bert/encoder/layer_6/attention/self/MatMul_1" -> "1194 QuantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"1194 QuantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1^0_1" -> "1195 DequantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"1195 DequantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1^0_1" -> "1196 bert/encoder/layer_6/attention/self/transpose_3" [label="[]", style=solid]; +"1196 bert/encoder/layer_6/attention/self/transpose_3" -> "1197 bert/encoder/layer_6/attention/self/Reshape_3" [label="[]", style=solid]; +"1197 bert/encoder/layer_6/attention/self/Reshape_3" -> "1200 bert/encoder/layer_6/attention/output/dense/MatMul" [label="[]", style=solid]; +"1198 QuantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel^0_1" -> "1199 DequantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"1199 DequantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel^0_1" -> "1200 bert/encoder/layer_6/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"1200 bert/encoder/layer_6/attention/output/dense/MatMul" -> "1201 bert/encoder/layer_6/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"1201 bert/encoder/layer_6/attention/output/dense/BiasAdd" -> "1202 bert/encoder/layer_6/attention/output/add" [label="[]", style=solid]; +"1202 bert/encoder/layer_6/attention/output/add" -> "1203 bert/encoder/layer_6/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1202 bert/encoder/layer_6/attention/output/add" -> "1205 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1202 bert/encoder/layer_6/attention/output/add" -> "1214 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1203 bert/encoder/layer_6/attention/output/LayerNorm/moments/mean" -> "1204 bert/encoder/layer_6/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1203 bert/encoder/layer_6/attention/output/LayerNorm/moments/mean" -> "1212 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1204 bert/encoder/layer_6/attention/output/LayerNorm/moments/StopGradient" -> "1205 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1205 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference" -> "1206 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference__393" [label="[]", style=solid]; +"1206 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference__393" -> "1207 bert/encoder/layer_6/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1207 bert/encoder/layer_6/attention/output/LayerNorm/moments/variance" -> "1208 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1208 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add" -> "1209 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1209 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/Rsqrt" -> "1210 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/Rsqrt__395" [label="[]", 
style=solid]; +"1210 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/Rsqrt__395" -> "1211 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1211 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul" -> "1212 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1211 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul" -> "1214 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1212 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_2" -> "1213 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1213 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/sub" -> "1215 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1214 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_1" -> "1215 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1215 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1" -> "1216 QuantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1215 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1" -> "1236 bert/encoder/layer_6/output/add" [label="[]", style=solid]; +"1216 QuantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1217 DequantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1217 DequantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1220 bert/encoder/layer_6/intermediate/dense/MatMul" [label="[]", style=solid]; +"1218 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel^0_1" -> "1219 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"1219 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel^0_1" -> "1220 bert/encoder/layer_6/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"1220 bert/encoder/layer_6/intermediate/dense/MatMul" -> "1221 bert/encoder/layer_6/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"1221 bert/encoder/layer_6/intermediate/dense/BiasAdd" -> "1222 bert/encoder/layer_6/intermediate/dense/Pow" [label="[]", style=solid]; +"1221 bert/encoder/layer_6/intermediate/dense/BiasAdd" -> "1224 bert/encoder/layer_6/intermediate/dense/add" [label="[]", style=solid]; +"1221 bert/encoder/layer_6/intermediate/dense/BiasAdd" -> "1229 bert/encoder/layer_6/intermediate/dense/mul_3" [label="[]", style=solid]; +"1222 bert/encoder/layer_6/intermediate/dense/Pow" -> "1223 bert/encoder/layer_6/intermediate/dense/mul" [label="[]", style=solid]; +"1223 bert/encoder/layer_6/intermediate/dense/mul" -> "1224 bert/encoder/layer_6/intermediate/dense/add" [label="[]", style=solid]; +"1224 bert/encoder/layer_6/intermediate/dense/add" -> "1225 bert/encoder/layer_6/intermediate/dense/mul_1" [label="[]", style=solid]; +"1225 bert/encoder/layer_6/intermediate/dense/mul_1" -> "1226 bert/encoder/layer_6/intermediate/dense/Tanh" [label="[]", style=solid]; +"1226 bert/encoder/layer_6/intermediate/dense/Tanh" -> "1227 bert/encoder/layer_6/intermediate/dense/add_1" [label="[]", style=solid]; +"1227 bert/encoder/layer_6/intermediate/dense/add_1" -> "1228 bert/encoder/layer_6/intermediate/dense/mul_2" [label="[]", style=solid]; +"1228 bert/encoder/layer_6/intermediate/dense/mul_2" -> "1229 
bert/encoder/layer_6/intermediate/dense/mul_3" [label="[]", style=solid]; +"1229 bert/encoder/layer_6/intermediate/dense/mul_3" -> "1230 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"1230 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3^0_1" -> "1231 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"1231 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3^0_1" -> "1234 bert/encoder/layer_6/output/dense/MatMul" [label="[]", style=solid]; +"1232 QuantizeLinear_bert/encoder/layer_6/output/dense/kernel^0_1" -> "1233 DequantizeLinear_bert/encoder/layer_6/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"1233 DequantizeLinear_bert/encoder/layer_6/output/dense/kernel^0_1" -> "1234 bert/encoder/layer_6/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"1234 bert/encoder/layer_6/output/dense/MatMul" -> "1235 bert/encoder/layer_6/output/dense/BiasAdd" [label="[]", style=solid]; +"1235 bert/encoder/layer_6/output/dense/BiasAdd" -> "1236 bert/encoder/layer_6/output/add" [label="[]", style=solid]; +"1236 bert/encoder/layer_6/output/add" -> "1237 bert/encoder/layer_6/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1236 bert/encoder/layer_6/output/add" -> "1239 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1236 bert/encoder/layer_6/output/add" -> "1248 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1237 bert/encoder/layer_6/output/LayerNorm/moments/mean" -> "1238 bert/encoder/layer_6/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1237 bert/encoder/layer_6/output/LayerNorm/moments/mean" -> "1246 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1238 bert/encoder/layer_6/output/LayerNorm/moments/StopGradient" -> "1239 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1239 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference" -> "1240 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference__397" [label="[]", style=solid]; +"1240 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference__397" -> "1241 bert/encoder/layer_6/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1241 bert/encoder/layer_6/output/LayerNorm/moments/variance" -> "1242 bert/encoder/layer_6/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1242 bert/encoder/layer_6/output/LayerNorm/batchnorm/add" -> "1243 bert/encoder/layer_6/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1243 bert/encoder/layer_6/output/LayerNorm/batchnorm/Rsqrt" -> "1244 bert/encoder/layer_6/output/LayerNorm/batchnorm/Rsqrt__399" [label="[]", style=solid]; +"1244 bert/encoder/layer_6/output/LayerNorm/batchnorm/Rsqrt__399" -> "1245 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1245 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul" -> "1246 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1245 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul" -> "1248 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1246 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_2" -> "1247 bert/encoder/layer_6/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1247 bert/encoder/layer_6/output/LayerNorm/batchnorm/sub" -> "1249 
bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1248 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_1" -> "1249 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1249 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" -> "1250 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1249 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" -> "1254 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"1249 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" -> "1256 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"1249 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" -> "1295 bert/encoder/layer_7/attention/output/add" [label="[]", style=solid]; +"1250 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_1" -> "1251 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1251 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_1" -> "1258 bert/encoder/layer_7/attention/self/value/MatMul" [label="[]", style=solid]; +"1252 QuantizeLinear_bert/encoder/layer_7/attention/self/value/kernel^0_1" -> "1253 DequantizeLinear_bert/encoder/layer_7/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"1253 DequantizeLinear_bert/encoder/layer_7/attention/self/value/kernel^0_1" -> "1258 bert/encoder/layer_7/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"1254 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_2" -> "1255 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"1255 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_2" -> "1264 bert/encoder/layer_7/attention/self/query/MatMul" [label="[]", style=solid]; +"1256 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_3" -> "1257 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"1257 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_3" -> "1272 bert/encoder/layer_7/attention/self/key/MatMul" [label="[]", style=solid]; +"1258 bert/encoder/layer_7/attention/self/value/MatMul" -> "1259 bert/encoder/layer_7/attention/self/value/BiasAdd" [label="[]", style=solid]; +"1259 bert/encoder/layer_7/attention/self/value/BiasAdd" -> "1260 bert/encoder/layer_7/attention/self/Reshape_2" [label="[]", style=solid]; +"1260 bert/encoder/layer_7/attention/self/Reshape_2" -> "1261 bert/encoder/layer_7/attention/self/transpose_2" [label="[]", style=solid]; +"1261 bert/encoder/layer_7/attention/self/transpose_2" -> "1286 bert/encoder/layer_7/attention/self/MatMul_1" [label="[]", style=solid]; +"1262 QuantizeLinear_bert/encoder/layer_7/attention/self/query/kernel^0_1" -> "1263 DequantizeLinear_bert/encoder/layer_7/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"1263 DequantizeLinear_bert/encoder/layer_7/attention/self/query/kernel^0_1" -> "1264 bert/encoder/layer_7/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"1264 bert/encoder/layer_7/attention/self/query/MatMul" -> "1265 bert/encoder/layer_7/attention/self/query/BiasAdd" [label="[]", style=solid]; +"1265 bert/encoder/layer_7/attention/self/query/BiasAdd" -> "1266 QuantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd^0_1" 
[label="[]", style=solid]; +"1266 QuantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd^0_1" -> "1267 DequantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"1267 DequantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd^0_1" -> "1268 bert/encoder/layer_7/attention/self/Reshape" [label="[]", style=solid]; +"1268 bert/encoder/layer_7/attention/self/Reshape" -> "1269 bert/encoder/layer_7/attention/self/transpose" [label="[]", style=solid]; +"1269 bert/encoder/layer_7/attention/self/transpose" -> "1279 bert/encoder/layer_7/attention/self/MatMul" [label="[]", style=solid]; +"1270 QuantizeLinear_bert/encoder/layer_7/attention/self/key/kernel^0_1" -> "1271 DequantizeLinear_bert/encoder/layer_7/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"1271 DequantizeLinear_bert/encoder/layer_7/attention/self/key/kernel^0_1" -> "1272 bert/encoder/layer_7/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"1272 bert/encoder/layer_7/attention/self/key/MatMul" -> "1273 bert/encoder/layer_7/attention/self/key/BiasAdd" [label="[]", style=solid]; +"1273 bert/encoder/layer_7/attention/self/key/BiasAdd" -> "1274 QuantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"1274 QuantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd^0_1" -> "1275 DequantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"1275 DequantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd^0_1" -> "1276 bert/encoder/layer_7/attention/self/Reshape_1" [label="[]", style=solid]; +"1276 bert/encoder/layer_7/attention/self/Reshape_1" -> "1277 bert/encoder/layer_7/attention/self/transpose_1" [label="[]", style=solid]; +"1277 bert/encoder/layer_7/attention/self/transpose_1" -> "1278 bert/encoder/layer_7/attention/self/MatMul__404" [label="[]", style=solid]; +"1278 bert/encoder/layer_7/attention/self/MatMul__404" -> "1279 bert/encoder/layer_7/attention/self/MatMul" [label="[]", style=solid]; +"1279 bert/encoder/layer_7/attention/self/MatMul" -> "1280 bert/encoder/layer_7/attention/self/Mul" [label="[]", style=solid]; +"1280 bert/encoder/layer_7/attention/self/Mul" -> "1281 bert/encoder/layer_7/attention/self/add" [label="[]", style=solid]; +"1281 bert/encoder/layer_7/attention/self/add" -> "1282 Shape_nncf_1064" [label="[]", style=solid]; +"1281 bert/encoder/layer_7/attention/self/add" -> "1283 Flatten_nncf_1065" [label="[]", style=solid]; +"1282 Shape_nncf_1064" -> "1285 Reshape_nncf_1067" [label="[-1]", style=dashed]; +"1283 Flatten_nncf_1065" -> "1284 bert/encoder/layer_7/attention/self/Softmax" [label="[]", style=solid]; +"1284 bert/encoder/layer_7/attention/self/Softmax" -> "1285 Reshape_nncf_1067" [label="[]", style=solid]; +"1285 Reshape_nncf_1067" -> "1286 bert/encoder/layer_7/attention/self/MatMul_1" [label="[]", style=solid]; +"1286 bert/encoder/layer_7/attention/self/MatMul_1" -> "1287 QuantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"1287 QuantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1^0_1" -> "1288 DequantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"1288 DequantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1^0_1" -> "1289 bert/encoder/layer_7/attention/self/transpose_3" [label="[]", style=solid]; +"1289 bert/encoder/layer_7/attention/self/transpose_3" -> "1290 bert/encoder/layer_7/attention/self/Reshape_3" [label="[]", style=solid]; +"1290 
bert/encoder/layer_7/attention/self/Reshape_3" -> "1293 bert/encoder/layer_7/attention/output/dense/MatMul" [label="[]", style=solid]; +"1291 QuantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel^0_1" -> "1292 DequantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"1292 DequantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel^0_1" -> "1293 bert/encoder/layer_7/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"1293 bert/encoder/layer_7/attention/output/dense/MatMul" -> "1294 bert/encoder/layer_7/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"1294 bert/encoder/layer_7/attention/output/dense/BiasAdd" -> "1295 bert/encoder/layer_7/attention/output/add" [label="[]", style=solid]; +"1295 bert/encoder/layer_7/attention/output/add" -> "1296 bert/encoder/layer_7/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1295 bert/encoder/layer_7/attention/output/add" -> "1298 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1295 bert/encoder/layer_7/attention/output/add" -> "1307 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1296 bert/encoder/layer_7/attention/output/LayerNorm/moments/mean" -> "1297 bert/encoder/layer_7/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1296 bert/encoder/layer_7/attention/output/LayerNorm/moments/mean" -> "1305 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1297 bert/encoder/layer_7/attention/output/LayerNorm/moments/StopGradient" -> "1298 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1298 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference" -> "1299 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference__407" [label="[]", style=solid]; +"1299 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference__407" -> "1300 bert/encoder/layer_7/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1300 bert/encoder/layer_7/attention/output/LayerNorm/moments/variance" -> "1301 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1301 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add" -> "1302 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1302 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/Rsqrt" -> "1303 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/Rsqrt__409" [label="[]", style=solid]; +"1303 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/Rsqrt__409" -> "1304 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1304 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul" -> "1305 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1304 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul" -> "1307 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1305 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_2" -> "1306 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1306 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/sub" -> "1308 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; 
+"1307 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_1" -> "1308 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1308 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1" -> "1309 QuantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1308 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1" -> "1329 bert/encoder/layer_7/output/add" [label="[]", style=solid]; +"1309 QuantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1310 DequantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1310 DequantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1313 bert/encoder/layer_7/intermediate/dense/MatMul" [label="[]", style=solid]; +"1311 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel^0_1" -> "1312 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"1312 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel^0_1" -> "1313 bert/encoder/layer_7/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"1313 bert/encoder/layer_7/intermediate/dense/MatMul" -> "1314 bert/encoder/layer_7/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"1314 bert/encoder/layer_7/intermediate/dense/BiasAdd" -> "1315 bert/encoder/layer_7/intermediate/dense/Pow" [label="[]", style=solid]; +"1314 bert/encoder/layer_7/intermediate/dense/BiasAdd" -> "1317 bert/encoder/layer_7/intermediate/dense/add" [label="[]", style=solid]; +"1314 bert/encoder/layer_7/intermediate/dense/BiasAdd" -> "1322 bert/encoder/layer_7/intermediate/dense/mul_3" [label="[]", style=solid]; +"1315 bert/encoder/layer_7/intermediate/dense/Pow" -> "1316 bert/encoder/layer_7/intermediate/dense/mul" [label="[]", style=solid]; +"1316 bert/encoder/layer_7/intermediate/dense/mul" -> "1317 bert/encoder/layer_7/intermediate/dense/add" [label="[]", style=solid]; +"1317 bert/encoder/layer_7/intermediate/dense/add" -> "1318 bert/encoder/layer_7/intermediate/dense/mul_1" [label="[]", style=solid]; +"1318 bert/encoder/layer_7/intermediate/dense/mul_1" -> "1319 bert/encoder/layer_7/intermediate/dense/Tanh" [label="[]", style=solid]; +"1319 bert/encoder/layer_7/intermediate/dense/Tanh" -> "1320 bert/encoder/layer_7/intermediate/dense/add_1" [label="[]", style=solid]; +"1320 bert/encoder/layer_7/intermediate/dense/add_1" -> "1321 bert/encoder/layer_7/intermediate/dense/mul_2" [label="[]", style=solid]; +"1321 bert/encoder/layer_7/intermediate/dense/mul_2" -> "1322 bert/encoder/layer_7/intermediate/dense/mul_3" [label="[]", style=solid]; +"1322 bert/encoder/layer_7/intermediate/dense/mul_3" -> "1323 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"1323 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3^0_1" -> "1324 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"1324 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3^0_1" -> "1327 bert/encoder/layer_7/output/dense/MatMul" [label="[]", style=solid]; +"1325 QuantizeLinear_bert/encoder/layer_7/output/dense/kernel^0_1" -> "1326 DequantizeLinear_bert/encoder/layer_7/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"1326 DequantizeLinear_bert/encoder/layer_7/output/dense/kernel^0_1" -> "1327 bert/encoder/layer_7/output/dense/MatMul" [label="[3072, 
768]", style=solid]; +"1327 bert/encoder/layer_7/output/dense/MatMul" -> "1328 bert/encoder/layer_7/output/dense/BiasAdd" [label="[]", style=solid]; +"1328 bert/encoder/layer_7/output/dense/BiasAdd" -> "1329 bert/encoder/layer_7/output/add" [label="[]", style=solid]; +"1329 bert/encoder/layer_7/output/add" -> "1330 bert/encoder/layer_7/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1329 bert/encoder/layer_7/output/add" -> "1332 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1329 bert/encoder/layer_7/output/add" -> "1341 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1330 bert/encoder/layer_7/output/LayerNorm/moments/mean" -> "1331 bert/encoder/layer_7/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1330 bert/encoder/layer_7/output/LayerNorm/moments/mean" -> "1339 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1331 bert/encoder/layer_7/output/LayerNorm/moments/StopGradient" -> "1332 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1332 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference" -> "1333 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference__411" [label="[]", style=solid]; +"1333 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference__411" -> "1334 bert/encoder/layer_7/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1334 bert/encoder/layer_7/output/LayerNorm/moments/variance" -> "1335 bert/encoder/layer_7/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1335 bert/encoder/layer_7/output/LayerNorm/batchnorm/add" -> "1336 bert/encoder/layer_7/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1336 bert/encoder/layer_7/output/LayerNorm/batchnorm/Rsqrt" -> "1337 bert/encoder/layer_7/output/LayerNorm/batchnorm/Rsqrt__413" [label="[]", style=solid]; +"1337 bert/encoder/layer_7/output/LayerNorm/batchnorm/Rsqrt__413" -> "1338 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1338 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul" -> "1339 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1338 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul" -> "1341 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1339 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_2" -> "1340 bert/encoder/layer_7/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1340 bert/encoder/layer_7/output/LayerNorm/batchnorm/sub" -> "1342 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1341 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_1" -> "1342 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1342 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" -> "1343 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1342 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" -> "1347 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"1342 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" -> "1349 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"1342 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" -> "1388 bert/encoder/layer_8/attention/output/add" [label="[]", style=solid]; +"1343 
QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_1" -> "1344 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1344 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_1" -> "1351 bert/encoder/layer_8/attention/self/value/MatMul" [label="[]", style=solid]; +"1345 QuantizeLinear_bert/encoder/layer_8/attention/self/value/kernel^0_1" -> "1346 DequantizeLinear_bert/encoder/layer_8/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"1346 DequantizeLinear_bert/encoder/layer_8/attention/self/value/kernel^0_1" -> "1351 bert/encoder/layer_8/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"1347 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_2" -> "1348 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"1348 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_2" -> "1357 bert/encoder/layer_8/attention/self/query/MatMul" [label="[]", style=solid]; +"1349 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_3" -> "1350 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"1350 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_3" -> "1365 bert/encoder/layer_8/attention/self/key/MatMul" [label="[]", style=solid]; +"1351 bert/encoder/layer_8/attention/self/value/MatMul" -> "1352 bert/encoder/layer_8/attention/self/value/BiasAdd" [label="[]", style=solid]; +"1352 bert/encoder/layer_8/attention/self/value/BiasAdd" -> "1353 bert/encoder/layer_8/attention/self/Reshape_2" [label="[]", style=solid]; +"1353 bert/encoder/layer_8/attention/self/Reshape_2" -> "1354 bert/encoder/layer_8/attention/self/transpose_2" [label="[]", style=solid]; +"1354 bert/encoder/layer_8/attention/self/transpose_2" -> "1379 bert/encoder/layer_8/attention/self/MatMul_1" [label="[]", style=solid]; +"1355 QuantizeLinear_bert/encoder/layer_8/attention/self/query/kernel^0_1" -> "1356 DequantizeLinear_bert/encoder/layer_8/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"1356 DequantizeLinear_bert/encoder/layer_8/attention/self/query/kernel^0_1" -> "1357 bert/encoder/layer_8/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"1357 bert/encoder/layer_8/attention/self/query/MatMul" -> "1358 bert/encoder/layer_8/attention/self/query/BiasAdd" [label="[]", style=solid]; +"1358 bert/encoder/layer_8/attention/self/query/BiasAdd" -> "1359 QuantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"1359 QuantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd^0_1" -> "1360 DequantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"1360 DequantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd^0_1" -> "1361 bert/encoder/layer_8/attention/self/Reshape" [label="[]", style=solid]; +"1361 bert/encoder/layer_8/attention/self/Reshape" -> "1362 bert/encoder/layer_8/attention/self/transpose" [label="[]", style=solid]; +"1362 bert/encoder/layer_8/attention/self/transpose" -> "1372 bert/encoder/layer_8/attention/self/MatMul" [label="[]", style=solid]; +"1363 QuantizeLinear_bert/encoder/layer_8/attention/self/key/kernel^0_1" -> "1364 DequantizeLinear_bert/encoder/layer_8/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"1364 
DequantizeLinear_bert/encoder/layer_8/attention/self/key/kernel^0_1" -> "1365 bert/encoder/layer_8/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"1365 bert/encoder/layer_8/attention/self/key/MatMul" -> "1366 bert/encoder/layer_8/attention/self/key/BiasAdd" [label="[]", style=solid]; +"1366 bert/encoder/layer_8/attention/self/key/BiasAdd" -> "1367 QuantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"1367 QuantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd^0_1" -> "1368 DequantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"1368 DequantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd^0_1" -> "1369 bert/encoder/layer_8/attention/self/Reshape_1" [label="[]", style=solid]; +"1369 bert/encoder/layer_8/attention/self/Reshape_1" -> "1370 bert/encoder/layer_8/attention/self/transpose_1" [label="[]", style=solid]; +"1370 bert/encoder/layer_8/attention/self/transpose_1" -> "1371 bert/encoder/layer_8/attention/self/MatMul__418" [label="[]", style=solid]; +"1371 bert/encoder/layer_8/attention/self/MatMul__418" -> "1372 bert/encoder/layer_8/attention/self/MatMul" [label="[]", style=solid]; +"1372 bert/encoder/layer_8/attention/self/MatMul" -> "1373 bert/encoder/layer_8/attention/self/Mul" [label="[]", style=solid]; +"1373 bert/encoder/layer_8/attention/self/Mul" -> "1374 bert/encoder/layer_8/attention/self/add" [label="[]", style=solid]; +"1374 bert/encoder/layer_8/attention/self/add" -> "1375 Shape_nncf_1129" [label="[]", style=solid]; +"1374 bert/encoder/layer_8/attention/self/add" -> "1376 Flatten_nncf_1130" [label="[]", style=solid]; +"1375 Shape_nncf_1129" -> "1378 Reshape_nncf_1132" [label="[-1]", style=dashed]; +"1376 Flatten_nncf_1130" -> "1377 bert/encoder/layer_8/attention/self/Softmax" [label="[]", style=solid]; +"1377 bert/encoder/layer_8/attention/self/Softmax" -> "1378 Reshape_nncf_1132" [label="[]", style=solid]; +"1378 Reshape_nncf_1132" -> "1379 bert/encoder/layer_8/attention/self/MatMul_1" [label="[]", style=solid]; +"1379 bert/encoder/layer_8/attention/self/MatMul_1" -> "1380 QuantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"1380 QuantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1^0_1" -> "1381 DequantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"1381 DequantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1^0_1" -> "1382 bert/encoder/layer_8/attention/self/transpose_3" [label="[]", style=solid]; +"1382 bert/encoder/layer_8/attention/self/transpose_3" -> "1383 bert/encoder/layer_8/attention/self/Reshape_3" [label="[]", style=solid]; +"1383 bert/encoder/layer_8/attention/self/Reshape_3" -> "1386 bert/encoder/layer_8/attention/output/dense/MatMul" [label="[]", style=solid]; +"1384 QuantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel^0_1" -> "1385 DequantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"1385 DequantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel^0_1" -> "1386 bert/encoder/layer_8/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"1386 bert/encoder/layer_8/attention/output/dense/MatMul" -> "1387 bert/encoder/layer_8/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"1387 bert/encoder/layer_8/attention/output/dense/BiasAdd" -> "1388 bert/encoder/layer_8/attention/output/add" [label="[]", style=solid]; +"1388 
bert/encoder/layer_8/attention/output/add" -> "1389 bert/encoder/layer_8/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1388 bert/encoder/layer_8/attention/output/add" -> "1391 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1388 bert/encoder/layer_8/attention/output/add" -> "1400 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1389 bert/encoder/layer_8/attention/output/LayerNorm/moments/mean" -> "1390 bert/encoder/layer_8/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1389 bert/encoder/layer_8/attention/output/LayerNorm/moments/mean" -> "1398 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1390 bert/encoder/layer_8/attention/output/LayerNorm/moments/StopGradient" -> "1391 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1391 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference" -> "1392 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference__421" [label="[]", style=solid]; +"1392 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference__421" -> "1393 bert/encoder/layer_8/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1393 bert/encoder/layer_8/attention/output/LayerNorm/moments/variance" -> "1394 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1394 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add" -> "1395 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1395 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/Rsqrt" -> "1396 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/Rsqrt__423" [label="[]", style=solid]; +"1396 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/Rsqrt__423" -> "1397 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1397 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul" -> "1398 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1397 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul" -> "1400 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1398 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_2" -> "1399 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1399 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/sub" -> "1401 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1400 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_1" -> "1401 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1401 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1" -> "1402 QuantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1401 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1" -> "1422 bert/encoder/layer_8/output/add" [label="[]", style=solid]; +"1402 QuantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1403 DequantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1403 
DequantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1406 bert/encoder/layer_8/intermediate/dense/MatMul" [label="[]", style=solid]; +"1404 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel^0_1" -> "1405 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"1405 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel^0_1" -> "1406 bert/encoder/layer_8/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"1406 bert/encoder/layer_8/intermediate/dense/MatMul" -> "1407 bert/encoder/layer_8/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"1407 bert/encoder/layer_8/intermediate/dense/BiasAdd" -> "1408 bert/encoder/layer_8/intermediate/dense/Pow" [label="[]", style=solid]; +"1407 bert/encoder/layer_8/intermediate/dense/BiasAdd" -> "1410 bert/encoder/layer_8/intermediate/dense/add" [label="[]", style=solid]; +"1407 bert/encoder/layer_8/intermediate/dense/BiasAdd" -> "1415 bert/encoder/layer_8/intermediate/dense/mul_3" [label="[]", style=solid]; +"1408 bert/encoder/layer_8/intermediate/dense/Pow" -> "1409 bert/encoder/layer_8/intermediate/dense/mul" [label="[]", style=solid]; +"1409 bert/encoder/layer_8/intermediate/dense/mul" -> "1410 bert/encoder/layer_8/intermediate/dense/add" [label="[]", style=solid]; +"1410 bert/encoder/layer_8/intermediate/dense/add" -> "1411 bert/encoder/layer_8/intermediate/dense/mul_1" [label="[]", style=solid]; +"1411 bert/encoder/layer_8/intermediate/dense/mul_1" -> "1412 bert/encoder/layer_8/intermediate/dense/Tanh" [label="[]", style=solid]; +"1412 bert/encoder/layer_8/intermediate/dense/Tanh" -> "1413 bert/encoder/layer_8/intermediate/dense/add_1" [label="[]", style=solid]; +"1413 bert/encoder/layer_8/intermediate/dense/add_1" -> "1414 bert/encoder/layer_8/intermediate/dense/mul_2" [label="[]", style=solid]; +"1414 bert/encoder/layer_8/intermediate/dense/mul_2" -> "1415 bert/encoder/layer_8/intermediate/dense/mul_3" [label="[]", style=solid]; +"1415 bert/encoder/layer_8/intermediate/dense/mul_3" -> "1416 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"1416 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3^0_1" -> "1417 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"1417 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3^0_1" -> "1420 bert/encoder/layer_8/output/dense/MatMul" [label="[]", style=solid]; +"1418 QuantizeLinear_bert/encoder/layer_8/output/dense/kernel^0_1" -> "1419 DequantizeLinear_bert/encoder/layer_8/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"1419 DequantizeLinear_bert/encoder/layer_8/output/dense/kernel^0_1" -> "1420 bert/encoder/layer_8/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"1420 bert/encoder/layer_8/output/dense/MatMul" -> "1421 bert/encoder/layer_8/output/dense/BiasAdd" [label="[]", style=solid]; +"1421 bert/encoder/layer_8/output/dense/BiasAdd" -> "1422 bert/encoder/layer_8/output/add" [label="[]", style=solid]; +"1422 bert/encoder/layer_8/output/add" -> "1423 bert/encoder/layer_8/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1422 bert/encoder/layer_8/output/add" -> "1425 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1422 bert/encoder/layer_8/output/add" -> "1434 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1423 bert/encoder/layer_8/output/LayerNorm/moments/mean" 
-> "1424 bert/encoder/layer_8/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1423 bert/encoder/layer_8/output/LayerNorm/moments/mean" -> "1432 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1424 bert/encoder/layer_8/output/LayerNorm/moments/StopGradient" -> "1425 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1425 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference" -> "1426 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference__425" [label="[]", style=solid]; +"1426 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference__425" -> "1427 bert/encoder/layer_8/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1427 bert/encoder/layer_8/output/LayerNorm/moments/variance" -> "1428 bert/encoder/layer_8/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1428 bert/encoder/layer_8/output/LayerNorm/batchnorm/add" -> "1429 bert/encoder/layer_8/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1429 bert/encoder/layer_8/output/LayerNorm/batchnorm/Rsqrt" -> "1430 bert/encoder/layer_8/output/LayerNorm/batchnorm/Rsqrt__427" [label="[]", style=solid]; +"1430 bert/encoder/layer_8/output/LayerNorm/batchnorm/Rsqrt__427" -> "1431 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1431 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul" -> "1432 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1431 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul" -> "1434 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1432 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_2" -> "1433 bert/encoder/layer_8/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1433 bert/encoder/layer_8/output/LayerNorm/batchnorm/sub" -> "1435 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1434 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_1" -> "1435 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1435 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" -> "1436 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1435 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" -> "1440 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"1435 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" -> "1442 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"1435 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" -> "1481 bert/encoder/layer_9/attention/output/add" [label="[]", style=solid]; +"1436 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_1" -> "1437 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1437 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_1" -> "1444 bert/encoder/layer_9/attention/self/value/MatMul" [label="[]", style=solid]; +"1438 QuantizeLinear_bert/encoder/layer_9/attention/self/value/kernel^0_1" -> "1439 DequantizeLinear_bert/encoder/layer_9/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"1439 DequantizeLinear_bert/encoder/layer_9/attention/self/value/kernel^0_1" -> "1444 bert/encoder/layer_9/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"1440 
QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_2" -> "1441 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"1441 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_2" -> "1450 bert/encoder/layer_9/attention/self/query/MatMul" [label="[]", style=solid]; +"1442 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_3" -> "1443 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"1443 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_3" -> "1458 bert/encoder/layer_9/attention/self/key/MatMul" [label="[]", style=solid]; +"1444 bert/encoder/layer_9/attention/self/value/MatMul" -> "1445 bert/encoder/layer_9/attention/self/value/BiasAdd" [label="[]", style=solid]; +"1445 bert/encoder/layer_9/attention/self/value/BiasAdd" -> "1446 bert/encoder/layer_9/attention/self/Reshape_2" [label="[]", style=solid]; +"1446 bert/encoder/layer_9/attention/self/Reshape_2" -> "1447 bert/encoder/layer_9/attention/self/transpose_2" [label="[]", style=solid]; +"1447 bert/encoder/layer_9/attention/self/transpose_2" -> "1472 bert/encoder/layer_9/attention/self/MatMul_1" [label="[]", style=solid]; +"1448 QuantizeLinear_bert/encoder/layer_9/attention/self/query/kernel^0_1" -> "1449 DequantizeLinear_bert/encoder/layer_9/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"1449 DequantizeLinear_bert/encoder/layer_9/attention/self/query/kernel^0_1" -> "1450 bert/encoder/layer_9/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"1450 bert/encoder/layer_9/attention/self/query/MatMul" -> "1451 bert/encoder/layer_9/attention/self/query/BiasAdd" [label="[]", style=solid]; +"1451 bert/encoder/layer_9/attention/self/query/BiasAdd" -> "1452 QuantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"1452 QuantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd^0_1" -> "1453 DequantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"1453 DequantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd^0_1" -> "1454 bert/encoder/layer_9/attention/self/Reshape" [label="[]", style=solid]; +"1454 bert/encoder/layer_9/attention/self/Reshape" -> "1455 bert/encoder/layer_9/attention/self/transpose" [label="[]", style=solid]; +"1455 bert/encoder/layer_9/attention/self/transpose" -> "1465 bert/encoder/layer_9/attention/self/MatMul" [label="[]", style=solid]; +"1456 QuantizeLinear_bert/encoder/layer_9/attention/self/key/kernel^0_1" -> "1457 DequantizeLinear_bert/encoder/layer_9/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"1457 DequantizeLinear_bert/encoder/layer_9/attention/self/key/kernel^0_1" -> "1458 bert/encoder/layer_9/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"1458 bert/encoder/layer_9/attention/self/key/MatMul" -> "1459 bert/encoder/layer_9/attention/self/key/BiasAdd" [label="[]", style=solid]; +"1459 bert/encoder/layer_9/attention/self/key/BiasAdd" -> "1460 QuantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"1460 QuantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd^0_1" -> "1461 DequantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"1461 DequantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd^0_1" -> "1462 bert/encoder/layer_9/attention/self/Reshape_1" [label="[]", 
style=solid]; +"1462 bert/encoder/layer_9/attention/self/Reshape_1" -> "1463 bert/encoder/layer_9/attention/self/transpose_1" [label="[]", style=solid]; +"1463 bert/encoder/layer_9/attention/self/transpose_1" -> "1464 bert/encoder/layer_9/attention/self/MatMul__432" [label="[]", style=solid]; +"1464 bert/encoder/layer_9/attention/self/MatMul__432" -> "1465 bert/encoder/layer_9/attention/self/MatMul" [label="[]", style=solid]; +"1465 bert/encoder/layer_9/attention/self/MatMul" -> "1466 bert/encoder/layer_9/attention/self/Mul" [label="[]", style=solid]; +"1466 bert/encoder/layer_9/attention/self/Mul" -> "1467 bert/encoder/layer_9/attention/self/add" [label="[]", style=solid]; +"1467 bert/encoder/layer_9/attention/self/add" -> "1468 Shape_nncf_1194" [label="[]", style=solid]; +"1467 bert/encoder/layer_9/attention/self/add" -> "1469 Flatten_nncf_1195" [label="[]", style=solid]; +"1468 Shape_nncf_1194" -> "1471 Reshape_nncf_1197" [label="[-1]", style=dashed]; +"1469 Flatten_nncf_1195" -> "1470 bert/encoder/layer_9/attention/self/Softmax" [label="[]", style=solid]; +"1470 bert/encoder/layer_9/attention/self/Softmax" -> "1471 Reshape_nncf_1197" [label="[]", style=solid]; +"1471 Reshape_nncf_1197" -> "1472 bert/encoder/layer_9/attention/self/MatMul_1" [label="[]", style=solid]; +"1472 bert/encoder/layer_9/attention/self/MatMul_1" -> "1473 QuantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"1473 QuantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1^0_1" -> "1474 DequantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"1474 DequantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1^0_1" -> "1475 bert/encoder/layer_9/attention/self/transpose_3" [label="[]", style=solid]; +"1475 bert/encoder/layer_9/attention/self/transpose_3" -> "1476 bert/encoder/layer_9/attention/self/Reshape_3" [label="[]", style=solid]; +"1476 bert/encoder/layer_9/attention/self/Reshape_3" -> "1479 bert/encoder/layer_9/attention/output/dense/MatMul" [label="[]", style=solid]; +"1477 QuantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel^0_1" -> "1478 DequantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"1478 DequantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel^0_1" -> "1479 bert/encoder/layer_9/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"1479 bert/encoder/layer_9/attention/output/dense/MatMul" -> "1480 bert/encoder/layer_9/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"1480 bert/encoder/layer_9/attention/output/dense/BiasAdd" -> "1481 bert/encoder/layer_9/attention/output/add" [label="[]", style=solid]; +"1481 bert/encoder/layer_9/attention/output/add" -> "1482 bert/encoder/layer_9/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1481 bert/encoder/layer_9/attention/output/add" -> "1484 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1481 bert/encoder/layer_9/attention/output/add" -> "1493 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1482 bert/encoder/layer_9/attention/output/LayerNorm/moments/mean" -> "1483 bert/encoder/layer_9/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1482 bert/encoder/layer_9/attention/output/LayerNorm/moments/mean" -> "1491 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1483 
bert/encoder/layer_9/attention/output/LayerNorm/moments/StopGradient" -> "1484 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1484 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference" -> "1485 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference__435" [label="[]", style=solid]; +"1485 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference__435" -> "1486 bert/encoder/layer_9/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1486 bert/encoder/layer_9/attention/output/LayerNorm/moments/variance" -> "1487 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1487 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add" -> "1488 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1488 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/Rsqrt" -> "1489 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/Rsqrt__437" [label="[]", style=solid]; +"1489 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/Rsqrt__437" -> "1490 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1490 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul" -> "1491 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1490 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul" -> "1493 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1491 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_2" -> "1492 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1492 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/sub" -> "1494 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1493 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_1" -> "1494 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1494 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1" -> "1495 QuantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1494 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1" -> "1515 bert/encoder/layer_9/output/add" [label="[]", style=solid]; +"1495 QuantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1496 DequantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1496 DequantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1499 bert/encoder/layer_9/intermediate/dense/MatMul" [label="[]", style=solid]; +"1497 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel^0_1" -> "1498 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"1498 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel^0_1" -> "1499 bert/encoder/layer_9/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"1499 bert/encoder/layer_9/intermediate/dense/MatMul" -> "1500 bert/encoder/layer_9/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"1500 bert/encoder/layer_9/intermediate/dense/BiasAdd" -> "1501 bert/encoder/layer_9/intermediate/dense/Pow" [label="[]", style=solid]; +"1500 bert/encoder/layer_9/intermediate/dense/BiasAdd" -> "1503 
bert/encoder/layer_9/intermediate/dense/add" [label="[]", style=solid]; +"1500 bert/encoder/layer_9/intermediate/dense/BiasAdd" -> "1508 bert/encoder/layer_9/intermediate/dense/mul_3" [label="[]", style=solid]; +"1501 bert/encoder/layer_9/intermediate/dense/Pow" -> "1502 bert/encoder/layer_9/intermediate/dense/mul" [label="[]", style=solid]; +"1502 bert/encoder/layer_9/intermediate/dense/mul" -> "1503 bert/encoder/layer_9/intermediate/dense/add" [label="[]", style=solid]; +"1503 bert/encoder/layer_9/intermediate/dense/add" -> "1504 bert/encoder/layer_9/intermediate/dense/mul_1" [label="[]", style=solid]; +"1504 bert/encoder/layer_9/intermediate/dense/mul_1" -> "1505 bert/encoder/layer_9/intermediate/dense/Tanh" [label="[]", style=solid]; +"1505 bert/encoder/layer_9/intermediate/dense/Tanh" -> "1506 bert/encoder/layer_9/intermediate/dense/add_1" [label="[]", style=solid]; +"1506 bert/encoder/layer_9/intermediate/dense/add_1" -> "1507 bert/encoder/layer_9/intermediate/dense/mul_2" [label="[]", style=solid]; +"1507 bert/encoder/layer_9/intermediate/dense/mul_2" -> "1508 bert/encoder/layer_9/intermediate/dense/mul_3" [label="[]", style=solid]; +"1508 bert/encoder/layer_9/intermediate/dense/mul_3" -> "1509 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"1509 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3^0_1" -> "1510 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"1510 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3^0_1" -> "1513 bert/encoder/layer_9/output/dense/MatMul" [label="[]", style=solid]; +"1511 QuantizeLinear_bert/encoder/layer_9/output/dense/kernel^0_1" -> "1512 DequantizeLinear_bert/encoder/layer_9/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"1512 DequantizeLinear_bert/encoder/layer_9/output/dense/kernel^0_1" -> "1513 bert/encoder/layer_9/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"1513 bert/encoder/layer_9/output/dense/MatMul" -> "1514 bert/encoder/layer_9/output/dense/BiasAdd" [label="[]", style=solid]; +"1514 bert/encoder/layer_9/output/dense/BiasAdd" -> "1515 bert/encoder/layer_9/output/add" [label="[]", style=solid]; +"1515 bert/encoder/layer_9/output/add" -> "1516 bert/encoder/layer_9/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1515 bert/encoder/layer_9/output/add" -> "1518 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1515 bert/encoder/layer_9/output/add" -> "1527 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1516 bert/encoder/layer_9/output/LayerNorm/moments/mean" -> "1517 bert/encoder/layer_9/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1516 bert/encoder/layer_9/output/LayerNorm/moments/mean" -> "1525 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1517 bert/encoder/layer_9/output/LayerNorm/moments/StopGradient" -> "1518 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1518 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference" -> "1519 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference__439" [label="[]", style=solid]; +"1519 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference__439" -> "1520 bert/encoder/layer_9/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1520 bert/encoder/layer_9/output/LayerNorm/moments/variance" -> "1521 
bert/encoder/layer_9/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1521 bert/encoder/layer_9/output/LayerNorm/batchnorm/add" -> "1522 bert/encoder/layer_9/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1522 bert/encoder/layer_9/output/LayerNorm/batchnorm/Rsqrt" -> "1523 bert/encoder/layer_9/output/LayerNorm/batchnorm/Rsqrt__441" [label="[]", style=solid]; +"1523 bert/encoder/layer_9/output/LayerNorm/batchnorm/Rsqrt__441" -> "1524 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1524 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul" -> "1525 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1524 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul" -> "1527 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1525 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_2" -> "1526 bert/encoder/layer_9/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1526 bert/encoder/layer_9/output/LayerNorm/batchnorm/sub" -> "1528 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1527 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_1" -> "1528 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1528 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" -> "1529 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1528 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" -> "1533 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"1528 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" -> "1535 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"1528 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" -> "1574 bert/encoder/layer_10/attention/output/add" [label="[]", style=solid]; +"1529 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_1" -> "1530 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1530 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_1" -> "1537 bert/encoder/layer_10/attention/self/value/MatMul" [label="[]", style=solid]; +"1531 QuantizeLinear_bert/encoder/layer_10/attention/self/value/kernel^0_1" -> "1532 DequantizeLinear_bert/encoder/layer_10/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"1532 DequantizeLinear_bert/encoder/layer_10/attention/self/value/kernel^0_1" -> "1537 bert/encoder/layer_10/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"1533 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_2" -> "1534 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"1534 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_2" -> "1543 bert/encoder/layer_10/attention/self/query/MatMul" [label="[]", style=solid]; +"1535 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_3" -> "1536 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"1536 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_3" -> "1551 bert/encoder/layer_10/attention/self/key/MatMul" [label="[]", style=solid]; +"1537 bert/encoder/layer_10/attention/self/value/MatMul" -> "1538 bert/encoder/layer_10/attention/self/value/BiasAdd" [label="[]", 
style=solid]; +"1538 bert/encoder/layer_10/attention/self/value/BiasAdd" -> "1539 bert/encoder/layer_10/attention/self/Reshape_2" [label="[]", style=solid]; +"1539 bert/encoder/layer_10/attention/self/Reshape_2" -> "1540 bert/encoder/layer_10/attention/self/transpose_2" [label="[]", style=solid]; +"1540 bert/encoder/layer_10/attention/self/transpose_2" -> "1565 bert/encoder/layer_10/attention/self/MatMul_1" [label="[]", style=solid]; +"1541 QuantizeLinear_bert/encoder/layer_10/attention/self/query/kernel^0_1" -> "1542 DequantizeLinear_bert/encoder/layer_10/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"1542 DequantizeLinear_bert/encoder/layer_10/attention/self/query/kernel^0_1" -> "1543 bert/encoder/layer_10/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"1543 bert/encoder/layer_10/attention/self/query/MatMul" -> "1544 bert/encoder/layer_10/attention/self/query/BiasAdd" [label="[]", style=solid]; +"1544 bert/encoder/layer_10/attention/self/query/BiasAdd" -> "1545 QuantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"1545 QuantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd^0_1" -> "1546 DequantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"1546 DequantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd^0_1" -> "1547 bert/encoder/layer_10/attention/self/Reshape" [label="[]", style=solid]; +"1547 bert/encoder/layer_10/attention/self/Reshape" -> "1548 bert/encoder/layer_10/attention/self/transpose" [label="[]", style=solid]; +"1548 bert/encoder/layer_10/attention/self/transpose" -> "1558 bert/encoder/layer_10/attention/self/MatMul" [label="[]", style=solid]; +"1549 QuantizeLinear_bert/encoder/layer_10/attention/self/key/kernel^0_1" -> "1550 DequantizeLinear_bert/encoder/layer_10/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"1550 DequantizeLinear_bert/encoder/layer_10/attention/self/key/kernel^0_1" -> "1551 bert/encoder/layer_10/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"1551 bert/encoder/layer_10/attention/self/key/MatMul" -> "1552 bert/encoder/layer_10/attention/self/key/BiasAdd" [label="[]", style=solid]; +"1552 bert/encoder/layer_10/attention/self/key/BiasAdd" -> "1553 QuantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"1553 QuantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd^0_1" -> "1554 DequantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"1554 DequantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd^0_1" -> "1555 bert/encoder/layer_10/attention/self/Reshape_1" [label="[]", style=solid]; +"1555 bert/encoder/layer_10/attention/self/Reshape_1" -> "1556 bert/encoder/layer_10/attention/self/transpose_1" [label="[]", style=solid]; +"1556 bert/encoder/layer_10/attention/self/transpose_1" -> "1557 bert/encoder/layer_10/attention/self/MatMul__446" [label="[]", style=solid]; +"1557 bert/encoder/layer_10/attention/self/MatMul__446" -> "1558 bert/encoder/layer_10/attention/self/MatMul" [label="[]", style=solid]; +"1558 bert/encoder/layer_10/attention/self/MatMul" -> "1559 bert/encoder/layer_10/attention/self/Mul" [label="[]", style=solid]; +"1559 bert/encoder/layer_10/attention/self/Mul" -> "1560 bert/encoder/layer_10/attention/self/add" [label="[]", style=solid]; +"1560 bert/encoder/layer_10/attention/self/add" -> "1561 Shape_nncf_1259" [label="[]", style=solid]; +"1560 
bert/encoder/layer_10/attention/self/add" -> "1562 Flatten_nncf_1260" [label="[]", style=solid]; +"1561 Shape_nncf_1259" -> "1564 Reshape_nncf_1262" [label="[-1]", style=dashed]; +"1562 Flatten_nncf_1260" -> "1563 bert/encoder/layer_10/attention/self/Softmax" [label="[]", style=solid]; +"1563 bert/encoder/layer_10/attention/self/Softmax" -> "1564 Reshape_nncf_1262" [label="[]", style=solid]; +"1564 Reshape_nncf_1262" -> "1565 bert/encoder/layer_10/attention/self/MatMul_1" [label="[]", style=solid]; +"1565 bert/encoder/layer_10/attention/self/MatMul_1" -> "1566 QuantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"1566 QuantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1^0_1" -> "1567 DequantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"1567 DequantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1^0_1" -> "1568 bert/encoder/layer_10/attention/self/transpose_3" [label="[]", style=solid]; +"1568 bert/encoder/layer_10/attention/self/transpose_3" -> "1569 bert/encoder/layer_10/attention/self/Reshape_3" [label="[]", style=solid]; +"1569 bert/encoder/layer_10/attention/self/Reshape_3" -> "1572 bert/encoder/layer_10/attention/output/dense/MatMul" [label="[]", style=solid]; +"1570 QuantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel^0_1" -> "1571 DequantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"1571 DequantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel^0_1" -> "1572 bert/encoder/layer_10/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"1572 bert/encoder/layer_10/attention/output/dense/MatMul" -> "1573 bert/encoder/layer_10/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"1573 bert/encoder/layer_10/attention/output/dense/BiasAdd" -> "1574 bert/encoder/layer_10/attention/output/add" [label="[]", style=solid]; +"1574 bert/encoder/layer_10/attention/output/add" -> "1575 bert/encoder/layer_10/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1574 bert/encoder/layer_10/attention/output/add" -> "1577 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1574 bert/encoder/layer_10/attention/output/add" -> "1586 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1575 bert/encoder/layer_10/attention/output/LayerNorm/moments/mean" -> "1576 bert/encoder/layer_10/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1575 bert/encoder/layer_10/attention/output/LayerNorm/moments/mean" -> "1584 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1576 bert/encoder/layer_10/attention/output/LayerNorm/moments/StopGradient" -> "1577 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1577 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference" -> "1578 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference__449" [label="[]", style=solid]; +"1578 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference__449" -> "1579 bert/encoder/layer_10/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1579 bert/encoder/layer_10/attention/output/LayerNorm/moments/variance" -> "1580 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1580 
bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add" -> "1581 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1581 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/Rsqrt" -> "1582 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/Rsqrt__451" [label="[]", style=solid]; +"1582 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/Rsqrt__451" -> "1583 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1583 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul" -> "1584 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1583 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul" -> "1586 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1584 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_2" -> "1585 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1585 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/sub" -> "1587 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1586 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_1" -> "1587 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1587 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1" -> "1588 QuantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1587 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1" -> "1608 bert/encoder/layer_10/output/add" [label="[]", style=solid]; +"1588 QuantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1589 DequantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1589 DequantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1592 bert/encoder/layer_10/intermediate/dense/MatMul" [label="[]", style=solid]; +"1590 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel^0_1" -> "1591 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"1591 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel^0_1" -> "1592 bert/encoder/layer_10/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"1592 bert/encoder/layer_10/intermediate/dense/MatMul" -> "1593 bert/encoder/layer_10/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"1593 bert/encoder/layer_10/intermediate/dense/BiasAdd" -> "1594 bert/encoder/layer_10/intermediate/dense/Pow" [label="[]", style=solid]; +"1593 bert/encoder/layer_10/intermediate/dense/BiasAdd" -> "1596 bert/encoder/layer_10/intermediate/dense/add" [label="[]", style=solid]; +"1593 bert/encoder/layer_10/intermediate/dense/BiasAdd" -> "1601 bert/encoder/layer_10/intermediate/dense/mul_3" [label="[]", style=solid]; +"1594 bert/encoder/layer_10/intermediate/dense/Pow" -> "1595 bert/encoder/layer_10/intermediate/dense/mul" [label="[]", style=solid]; +"1595 bert/encoder/layer_10/intermediate/dense/mul" -> "1596 bert/encoder/layer_10/intermediate/dense/add" [label="[]", style=solid]; +"1596 bert/encoder/layer_10/intermediate/dense/add" -> "1597 bert/encoder/layer_10/intermediate/dense/mul_1" [label="[]", style=solid]; +"1597 bert/encoder/layer_10/intermediate/dense/mul_1" -> "1598 
bert/encoder/layer_10/intermediate/dense/Tanh" [label="[]", style=solid]; +"1598 bert/encoder/layer_10/intermediate/dense/Tanh" -> "1599 bert/encoder/layer_10/intermediate/dense/add_1" [label="[]", style=solid]; +"1599 bert/encoder/layer_10/intermediate/dense/add_1" -> "1600 bert/encoder/layer_10/intermediate/dense/mul_2" [label="[]", style=solid]; +"1600 bert/encoder/layer_10/intermediate/dense/mul_2" -> "1601 bert/encoder/layer_10/intermediate/dense/mul_3" [label="[]", style=solid]; +"1601 bert/encoder/layer_10/intermediate/dense/mul_3" -> "1602 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"1602 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3^0_1" -> "1603 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"1603 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3^0_1" -> "1606 bert/encoder/layer_10/output/dense/MatMul" [label="[]", style=solid]; +"1604 QuantizeLinear_bert/encoder/layer_10/output/dense/kernel^0_1" -> "1605 DequantizeLinear_bert/encoder/layer_10/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"1605 DequantizeLinear_bert/encoder/layer_10/output/dense/kernel^0_1" -> "1606 bert/encoder/layer_10/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"1606 bert/encoder/layer_10/output/dense/MatMul" -> "1607 bert/encoder/layer_10/output/dense/BiasAdd" [label="[]", style=solid]; +"1607 bert/encoder/layer_10/output/dense/BiasAdd" -> "1608 bert/encoder/layer_10/output/add" [label="[]", style=solid]; +"1608 bert/encoder/layer_10/output/add" -> "1609 bert/encoder/layer_10/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1608 bert/encoder/layer_10/output/add" -> "1611 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1608 bert/encoder/layer_10/output/add" -> "1620 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1609 bert/encoder/layer_10/output/LayerNorm/moments/mean" -> "1610 bert/encoder/layer_10/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1609 bert/encoder/layer_10/output/LayerNorm/moments/mean" -> "1618 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1610 bert/encoder/layer_10/output/LayerNorm/moments/StopGradient" -> "1611 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1611 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference" -> "1612 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference__453" [label="[]", style=solid]; +"1612 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference__453" -> "1613 bert/encoder/layer_10/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1613 bert/encoder/layer_10/output/LayerNorm/moments/variance" -> "1614 bert/encoder/layer_10/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1614 bert/encoder/layer_10/output/LayerNorm/batchnorm/add" -> "1615 bert/encoder/layer_10/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1615 bert/encoder/layer_10/output/LayerNorm/batchnorm/Rsqrt" -> "1616 bert/encoder/layer_10/output/LayerNorm/batchnorm/Rsqrt__455" [label="[]", style=solid]; +"1616 bert/encoder/layer_10/output/LayerNorm/batchnorm/Rsqrt__455" -> "1617 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1617 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul" -> "1618 
bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1617 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul" -> "1620 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1618 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_2" -> "1619 bert/encoder/layer_10/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1619 bert/encoder/layer_10/output/LayerNorm/batchnorm/sub" -> "1621 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1620 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_1" -> "1621 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1621 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" -> "1622 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1621 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" -> "1626 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"1621 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" -> "1628 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"1621 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" -> "1667 bert/encoder/layer_11/attention/output/add" [label="[]", style=solid]; +"1622 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_1" -> "1623 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1623 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_1" -> "1630 bert/encoder/layer_11/attention/self/value/MatMul" [label="[]", style=solid]; +"1624 QuantizeLinear_bert/encoder/layer_11/attention/self/value/kernel^0_1" -> "1625 DequantizeLinear_bert/encoder/layer_11/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"1625 DequantizeLinear_bert/encoder/layer_11/attention/self/value/kernel^0_1" -> "1630 bert/encoder/layer_11/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"1626 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_2" -> "1627 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"1627 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_2" -> "1636 bert/encoder/layer_11/attention/self/query/MatMul" [label="[]", style=solid]; +"1628 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_3" -> "1629 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"1629 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_3" -> "1644 bert/encoder/layer_11/attention/self/key/MatMul" [label="[]", style=solid]; +"1630 bert/encoder/layer_11/attention/self/value/MatMul" -> "1631 bert/encoder/layer_11/attention/self/value/BiasAdd" [label="[]", style=solid]; +"1631 bert/encoder/layer_11/attention/self/value/BiasAdd" -> "1632 bert/encoder/layer_11/attention/self/Reshape_2" [label="[]", style=solid]; +"1632 bert/encoder/layer_11/attention/self/Reshape_2" -> "1633 bert/encoder/layer_11/attention/self/transpose_2" [label="[]", style=solid]; +"1633 bert/encoder/layer_11/attention/self/transpose_2" -> "1658 bert/encoder/layer_11/attention/self/MatMul_1" [label="[]", style=solid]; +"1634 QuantizeLinear_bert/encoder/layer_11/attention/self/query/kernel^0_1" -> "1635 
DequantizeLinear_bert/encoder/layer_11/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"1635 DequantizeLinear_bert/encoder/layer_11/attention/self/query/kernel^0_1" -> "1636 bert/encoder/layer_11/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"1636 bert/encoder/layer_11/attention/self/query/MatMul" -> "1637 bert/encoder/layer_11/attention/self/query/BiasAdd" [label="[]", style=solid]; +"1637 bert/encoder/layer_11/attention/self/query/BiasAdd" -> "1638 QuantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"1638 QuantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd^0_1" -> "1639 DequantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"1639 DequantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd^0_1" -> "1640 bert/encoder/layer_11/attention/self/Reshape" [label="[]", style=solid]; +"1640 bert/encoder/layer_11/attention/self/Reshape" -> "1641 bert/encoder/layer_11/attention/self/transpose" [label="[]", style=solid]; +"1641 bert/encoder/layer_11/attention/self/transpose" -> "1651 bert/encoder/layer_11/attention/self/MatMul" [label="[]", style=solid]; +"1642 QuantizeLinear_bert/encoder/layer_11/attention/self/key/kernel^0_1" -> "1643 DequantizeLinear_bert/encoder/layer_11/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"1643 DequantizeLinear_bert/encoder/layer_11/attention/self/key/kernel^0_1" -> "1644 bert/encoder/layer_11/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"1644 bert/encoder/layer_11/attention/self/key/MatMul" -> "1645 bert/encoder/layer_11/attention/self/key/BiasAdd" [label="[]", style=solid]; +"1645 bert/encoder/layer_11/attention/self/key/BiasAdd" -> "1646 QuantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"1646 QuantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd^0_1" -> "1647 DequantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"1647 DequantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd^0_1" -> "1648 bert/encoder/layer_11/attention/self/Reshape_1" [label="[]", style=solid]; +"1648 bert/encoder/layer_11/attention/self/Reshape_1" -> "1649 bert/encoder/layer_11/attention/self/transpose_1" [label="[]", style=solid]; +"1649 bert/encoder/layer_11/attention/self/transpose_1" -> "1650 bert/encoder/layer_11/attention/self/MatMul__460" [label="[]", style=solid]; +"1650 bert/encoder/layer_11/attention/self/MatMul__460" -> "1651 bert/encoder/layer_11/attention/self/MatMul" [label="[]", style=solid]; +"1651 bert/encoder/layer_11/attention/self/MatMul" -> "1652 bert/encoder/layer_11/attention/self/Mul" [label="[]", style=solid]; +"1652 bert/encoder/layer_11/attention/self/Mul" -> "1653 bert/encoder/layer_11/attention/self/add" [label="[]", style=solid]; +"1653 bert/encoder/layer_11/attention/self/add" -> "1654 Shape_nncf_1324" [label="[]", style=solid]; +"1653 bert/encoder/layer_11/attention/self/add" -> "1655 Flatten_nncf_1325" [label="[]", style=solid]; +"1654 Shape_nncf_1324" -> "1657 Reshape_nncf_1327" [label="[-1]", style=dashed]; +"1655 Flatten_nncf_1325" -> "1656 bert/encoder/layer_11/attention/self/Softmax" [label="[]", style=solid]; +"1656 bert/encoder/layer_11/attention/self/Softmax" -> "1657 Reshape_nncf_1327" [label="[]", style=solid]; +"1657 Reshape_nncf_1327" -> "1658 bert/encoder/layer_11/attention/self/MatMul_1" [label="[]", style=solid]; +"1658 
bert/encoder/layer_11/attention/self/MatMul_1" -> "1659 QuantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"1659 QuantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1^0_1" -> "1660 DequantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"1660 DequantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1^0_1" -> "1661 bert/encoder/layer_11/attention/self/transpose_3" [label="[]", style=solid]; +"1661 bert/encoder/layer_11/attention/self/transpose_3" -> "1662 bert/encoder/layer_11/attention/self/Reshape_3" [label="[]", style=solid]; +"1662 bert/encoder/layer_11/attention/self/Reshape_3" -> "1665 bert/encoder/layer_11/attention/output/dense/MatMul" [label="[]", style=solid]; +"1663 QuantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel^0_1" -> "1664 DequantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"1664 DequantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel^0_1" -> "1665 bert/encoder/layer_11/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"1665 bert/encoder/layer_11/attention/output/dense/MatMul" -> "1666 bert/encoder/layer_11/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"1666 bert/encoder/layer_11/attention/output/dense/BiasAdd" -> "1667 bert/encoder/layer_11/attention/output/add" [label="[]", style=solid]; +"1667 bert/encoder/layer_11/attention/output/add" -> "1668 bert/encoder/layer_11/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1667 bert/encoder/layer_11/attention/output/add" -> "1670 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1667 bert/encoder/layer_11/attention/output/add" -> "1679 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1668 bert/encoder/layer_11/attention/output/LayerNorm/moments/mean" -> "1669 bert/encoder/layer_11/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1668 bert/encoder/layer_11/attention/output/LayerNorm/moments/mean" -> "1677 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1669 bert/encoder/layer_11/attention/output/LayerNorm/moments/StopGradient" -> "1670 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1670 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference" -> "1671 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference__463" [label="[]", style=solid]; +"1671 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference__463" -> "1672 bert/encoder/layer_11/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1672 bert/encoder/layer_11/attention/output/LayerNorm/moments/variance" -> "1673 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1673 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add" -> "1674 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1674 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/Rsqrt" -> "1675 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/Rsqrt__465" [label="[]", style=solid]; +"1675 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/Rsqrt__465" -> "1676 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1676 
bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul" -> "1677 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1676 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul" -> "1679 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1677 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_2" -> "1678 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1678 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/sub" -> "1680 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1679 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_1" -> "1680 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1680 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1" -> "1681 QuantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1680 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1" -> "1701 bert/encoder/layer_11/output/add" [label="[]", style=solid]; +"1681 QuantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1682 DequantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1682 DequantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1685 bert/encoder/layer_11/intermediate/dense/MatMul" [label="[]", style=solid]; +"1683 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel^0_1" -> "1684 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"1684 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel^0_1" -> "1685 bert/encoder/layer_11/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"1685 bert/encoder/layer_11/intermediate/dense/MatMul" -> "1686 bert/encoder/layer_11/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"1686 bert/encoder/layer_11/intermediate/dense/BiasAdd" -> "1687 bert/encoder/layer_11/intermediate/dense/Pow" [label="[]", style=solid]; +"1686 bert/encoder/layer_11/intermediate/dense/BiasAdd" -> "1689 bert/encoder/layer_11/intermediate/dense/add" [label="[]", style=solid]; +"1686 bert/encoder/layer_11/intermediate/dense/BiasAdd" -> "1694 bert/encoder/layer_11/intermediate/dense/mul_3" [label="[]", style=solid]; +"1687 bert/encoder/layer_11/intermediate/dense/Pow" -> "1688 bert/encoder/layer_11/intermediate/dense/mul" [label="[]", style=solid]; +"1688 bert/encoder/layer_11/intermediate/dense/mul" -> "1689 bert/encoder/layer_11/intermediate/dense/add" [label="[]", style=solid]; +"1689 bert/encoder/layer_11/intermediate/dense/add" -> "1690 bert/encoder/layer_11/intermediate/dense/mul_1" [label="[]", style=solid]; +"1690 bert/encoder/layer_11/intermediate/dense/mul_1" -> "1691 bert/encoder/layer_11/intermediate/dense/Tanh" [label="[]", style=solid]; +"1691 bert/encoder/layer_11/intermediate/dense/Tanh" -> "1692 bert/encoder/layer_11/intermediate/dense/add_1" [label="[]", style=solid]; +"1692 bert/encoder/layer_11/intermediate/dense/add_1" -> "1693 bert/encoder/layer_11/intermediate/dense/mul_2" [label="[]", style=solid]; +"1693 bert/encoder/layer_11/intermediate/dense/mul_2" -> "1694 bert/encoder/layer_11/intermediate/dense/mul_3" [label="[]", style=solid]; +"1694 bert/encoder/layer_11/intermediate/dense/mul_3" -> "1695 
QuantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"1695 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3^0_1" -> "1696 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"1696 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3^0_1" -> "1699 bert/encoder/layer_11/output/dense/MatMul" [label="[]", style=solid]; +"1697 QuantizeLinear_bert/encoder/layer_11/output/dense/kernel^0_1" -> "1698 DequantizeLinear_bert/encoder/layer_11/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"1698 DequantizeLinear_bert/encoder/layer_11/output/dense/kernel^0_1" -> "1699 bert/encoder/layer_11/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"1699 bert/encoder/layer_11/output/dense/MatMul" -> "1700 bert/encoder/layer_11/output/dense/BiasAdd" [label="[]", style=solid]; +"1700 bert/encoder/layer_11/output/dense/BiasAdd" -> "1701 bert/encoder/layer_11/output/add" [label="[]", style=solid]; +"1701 bert/encoder/layer_11/output/add" -> "1702 bert/encoder/layer_11/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1701 bert/encoder/layer_11/output/add" -> "1704 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1701 bert/encoder/layer_11/output/add" -> "1713 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1702 bert/encoder/layer_11/output/LayerNorm/moments/mean" -> "1703 bert/encoder/layer_11/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1702 bert/encoder/layer_11/output/LayerNorm/moments/mean" -> "1711 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1703 bert/encoder/layer_11/output/LayerNorm/moments/StopGradient" -> "1704 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1704 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference" -> "1705 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference__467" [label="[]", style=solid]; +"1705 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference__467" -> "1706 bert/encoder/layer_11/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1706 bert/encoder/layer_11/output/LayerNorm/moments/variance" -> "1707 bert/encoder/layer_11/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1707 bert/encoder/layer_11/output/LayerNorm/batchnorm/add" -> "1708 bert/encoder/layer_11/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1708 bert/encoder/layer_11/output/LayerNorm/batchnorm/Rsqrt" -> "1709 bert/encoder/layer_11/output/LayerNorm/batchnorm/Rsqrt__469" [label="[]", style=solid]; +"1709 bert/encoder/layer_11/output/LayerNorm/batchnorm/Rsqrt__469" -> "1710 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1710 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul" -> "1711 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1710 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul" -> "1713 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1711 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_2" -> "1712 bert/encoder/layer_11/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1712 bert/encoder/layer_11/output/LayerNorm/batchnorm/sub" -> "1714 bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1713 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_1" 
-> "1714 bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1714 bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1" -> "1715 QuantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1715 QuantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1^0_1" -> "1716 DequantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1716 DequantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1^0_1" -> "1717 bert/encoder/Reshape_13" [label="[]", style=solid]; +"1717 bert/encoder/Reshape_13" -> "1718 Shape_1" [label="[]", style=solid]; +"1717 bert/encoder/Reshape_13" -> "1733 Reshape" [label="[]", style=solid]; +"1718 Shape_1" -> "1719 Shape_1__472" [label="[-1]", style=dashed]; +"1719 Shape_1__472" -> "1720 strided_slice_1" [label="[-1]", style=solid]; +"1720 strided_slice_1" -> "1722 strided_slice_1__476" [label="[-1]", style=solid]; +"1721 Constant_nncf_1377" -> "1722 strided_slice_1__476" [label="[1]", style=dashed]; +"1722 strided_slice_1__476" -> "1723 strided_slice_1__477" [label="[]", style=solid]; +"1723 strided_slice_1__477" -> "1724 mul" [label="[]", style=dashed]; +"1723 strided_slice_1__477" -> "1730 Reshape_1/shape_Unsqueeze__478" [label="[]", style=dashed]; +"1724 mul" -> "1726 Reshape/shape_Unsqueeze__482" [label="[]", style=dashed]; +"1725 Constant_nncf_1381" -> "1726 Reshape/shape_Unsqueeze__482" [label="[1]", style=dashed]; +"1726 Reshape/shape_Unsqueeze__482" -> "1727 Reshape/shape_Concat__484" [label="[1]", style=dashed]; +"1727 Reshape/shape_Concat__484" -> "1728 Reshape__485" [label="[2]", style=dashed]; +"1728 Reshape__485" -> "1733 Reshape" [label="[2]", style=dashed]; +"1729 Constant_nncf_1385" -> "1730 Reshape_1/shape_Unsqueeze__478" [label="[1]", style=dashed]; +"1730 Reshape_1/shape_Unsqueeze__478" -> "1731 Reshape_1/shape_Concat__481" [label="[1]", style=dashed]; +"1731 Reshape_1/shape_Concat__481" -> "1732 Reshape_1__487" [label="[3]", style=dashed]; +"1732 Reshape_1__487" -> "1738 Reshape_1" [label="[3]", style=dashed]; +"1733 Reshape" -> "1736 MatMul" [label="[]", style=solid]; +"1734 QuantizeLinear_MatMul__486^0_1" -> "1735 DequantizeLinear_MatMul__486^0_1" [label="[768, 2]", style=dashed]; +"1735 DequantizeLinear_MatMul__486^0_1" -> "1736 MatMul" [label="[768, 2]", style=solid]; +"1736 MatMul" -> "1737 BiasAdd" [label="[]", style=solid]; +"1737 BiasAdd" -> "1738 Reshape_1" [label="[]", style=solid]; +"1738 Reshape_1" -> "1739 transpose" [label="[]", style=solid]; +"1739 transpose" -> "1740 unstack" [label="[]", style=solid]; +"1740 unstack" -> "1742 unstack__490" [label="[]", style=solid]; +"1740 unstack" -> "1745 unstack__488" [label="[]", style=solid]; +"1741 Constant_nncf_1395" -> "1742 unstack__490" [label="[1]", style=dashed]; +"1742 unstack__490" -> "1743 unstack_graph_outputs_Identity__4" [label="[]", style=solid]; +"1743 unstack_graph_outputs_Identity__4" -> "1751 nncf_model_output_0" [label="[-1, 256]", style=solid]; +"1744 Constant_nncf_1398" -> "1745 unstack__488" [label="[1]", style=dashed]; +"1745 unstack__488" -> "1746 unstack_graph_outputs_Identity__7" [label="[]", style=solid]; +"1746 unstack_graph_outputs_Identity__7" -> "1752 nncf_model_output_1" [label="[-1, 256]", style=solid]; +"1747 nncf_model_input_0" -> "0 unique_ids_graph_outputs_Identity__10" [label="[-1]", style=dashed]; +"1748 nncf_model_input_1" -> "316 bert/embeddings/Reshape_2" [label="[-1, 256]", style=dashed]; +"1749 
nncf_model_input_2" -> "269 bert/encoder/Reshape" [label="[-1, 256]", style=dashed]; +"1750 nncf_model_input_3" -> "245 bert/encoder/Shape" [label="[-1, 256]", style=dashed]; +"1750 nncf_model_input_3" -> "322 bert/embeddings/ExpandDims" [label="[-1, 256]", style=dashed]; } diff --git a/tests/onnx/data/reference_graphs/quantization/gpt2-10.dot b/tests/onnx/data/reference_graphs/quantization/gpt2-10.dot index a9338508e43..169f6110f1b 100644 --- a/tests/onnx/data/reference_graphs/quantization/gpt2-10.dot +++ b/tests/onnx/data/reference_graphs/quantization/gpt2-10.dot @@ -8,6173 +8,6995 @@ strict digraph { "6 Shape_6" [id=6, type=Shape]; "7 Constant_7" [id=7, type=Constant]; "8 Gather_8" [id=8, type=Gather]; -"9 Unsqueeze_9" [id=9, type=Unsqueeze]; -"10 Concat_10" [id=10, type=Concat]; -"11 Reshape_11" [id=11, type=Reshape]; -"12 Unsqueeze_12" [id=12, type=Unsqueeze]; -"13 Sub_13" [id=13, type=Sub]; -"14 Div_14" [id=14, type=Div]; -"15 ConstantOfShape_15" [id=15, type=ConstantOfShape]; -"16 NonZero_16" [id=16, type=NonZero]; -"17 Transpose_17" [id=17, type=Transpose]; -"18 Squeeze_18" [id=18, type=Squeeze]; -"19 Mul_19" [id=19, type=Mul]; -"20 Add_20" [id=20, type=Add]; -"21 Cast_21" [id=21, type=Cast]; -"22 Unsqueeze_22" [id=22, type=Unsqueeze]; -"23 Unsqueeze_23" [id=23, type=Unsqueeze]; -"24 Concat_24" [id=24, type=Concat]; -"25 Reshape_25" [id=25, type=Reshape]; -"26 QuantizeLinear_wte.weight_1" [id=26, type=QuantizeLinear]; -"27 DequantizeLinear_wte.weight_1" [id=27, type=DequantizeLinear]; -"28 Gather_26" [id=28, type=Gather]; -"29 QuantizeLinear_wpe.weight_1" [id=29, type=QuantizeLinear]; -"30 DequantizeLinear_wpe.weight_1" [id=30, type=DequantizeLinear]; -"31 Gather_27" [id=31, type=Gather]; -"32 Add_28" [id=32, type=Add]; -"33 Shape_29" [id=33, type=Shape]; -"34 Constant_30" [id=34, type=Constant]; -"35 Gather_31" [id=35, type=Gather]; -"36 ReduceMean_32" [id=36, type=ReduceMean]; -"37 Sub_33" [id=37, type=Sub]; -"38 Constant_34" [id=38, type=Constant]; -"39 Pow_35" [id=39, type=Pow]; -"40 ReduceMean_36" [id=40, type=ReduceMean]; -"41 Constant_37" [id=41, type=Constant]; -"42 Add_38" [id=42, type=Add]; -"43 Sqrt_39" [id=43, type=Sqrt]; -"44 Div_40" [id=44, type=Div]; -"45 Mul_41" [id=45, type=Mul]; -"46 Add_42" [id=46, type=Add]; -"47 QuantizeLinear_211_1" [id=47, type=QuantizeLinear]; -"48 DequantizeLinear_211_1" [id=48, type=DequantizeLinear]; -"49 Shape_43" [id=49, type=Shape]; -"50 Constant_44" [id=50, type=Constant]; -"51 Gather_45" [id=51, type=Gather]; -"52 Shape_46" [id=52, type=Shape]; -"53 Constant_47" [id=53, type=Constant]; -"54 Gather_48" [id=54, type=Gather]; -"55 Shape_49" [id=55, type=Shape]; -"56 Constant_50" [id=56, type=Constant]; -"57 Gather_51" [id=57, type=Gather]; -"58 Unsqueeze_52" [id=58, type=Unsqueeze]; -"59 Concat_53" [id=59, type=Concat]; -"60 Reshape_54" [id=60, type=Reshape]; -"61 QuantizeLinear_h.0.attn.c_attn.weight_1" [id=61, type=QuantizeLinear]; -"62 DequantizeLinear_h.0.attn.c_attn.weight_1" [id=62, type=DequantizeLinear]; -"63 Gemm_55" [id=63, type=Gemm]; -"64 Unsqueeze_56" [id=64, type=Unsqueeze]; -"65 Unsqueeze_57" [id=65, type=Unsqueeze]; -"66 Concat_58" [id=66, type=Concat]; -"67 Reshape_59" [id=67, type=Reshape]; -"68 Split_60" [id=68, type=Split]; -"69 QuantizeLinear_query.1_1" [id=69, type=QuantizeLinear]; -"70 DequantizeLinear_query.1_1" [id=70, type=DequantizeLinear]; -"71 Shape_61" [id=71, type=Shape]; -"72 Constant_62" [id=72, type=Constant]; -"73 Gather_63" [id=73, type=Gather]; -"74 Shape_64" [id=74, type=Shape]; -"75 Constant_65" 
[id=75, type=Constant]; -"76 Gather_66" [id=76, type=Gather]; -"77 Shape_67" [id=77, type=Shape]; -"78 Constant_68" [id=78, type=Constant]; -"79 Gather_69" [id=79, type=Gather]; -"80 Constant_70" [id=80, type=Constant]; -"81 Div_71" [id=81, type=Div]; -"82 Cast_72" [id=82, type=Cast]; -"83 Cast_73" [id=83, type=Cast]; -"84 Unsqueeze_74" [id=84, type=Unsqueeze]; -"85 Unsqueeze_75" [id=85, type=Unsqueeze]; -"86 Unsqueeze_76" [id=86, type=Unsqueeze]; -"87 Concat_77" [id=87, type=Concat]; -"88 Reshape_78" [id=88, type=Reshape]; -"89 Transpose_79" [id=89, type=Transpose]; -"90 Shape_80" [id=90, type=Shape]; -"91 Constant_81" [id=91, type=Constant]; -"92 Gather_82" [id=92, type=Gather]; -"93 Shape_83" [id=93, type=Shape]; -"94 Constant_84" [id=94, type=Constant]; -"95 Gather_85" [id=95, type=Gather]; -"96 Shape_86" [id=96, type=Shape]; -"97 Constant_87" [id=97, type=Constant]; -"98 Gather_88" [id=98, type=Gather]; -"99 Constant_89" [id=99, type=Constant]; -"100 Div_90" [id=100, type=Div]; -"101 Cast_91" [id=101, type=Cast]; -"102 Cast_92" [id=102, type=Cast]; -"103 Unsqueeze_93" [id=103, type=Unsqueeze]; -"104 Unsqueeze_94" [id=104, type=Unsqueeze]; -"105 Unsqueeze_95" [id=105, type=Unsqueeze]; -"106 Concat_96" [id=106, type=Concat]; -"107 Reshape_97" [id=107, type=Reshape]; -"108 QuantizeLinear_276_1" [id=108, type=QuantizeLinear]; -"109 DequantizeLinear_276_1" [id=109, type=DequantizeLinear]; -"110 Transpose_98" [id=110, type=Transpose]; -"111 Shape_99" [id=111, type=Shape]; -"112 Constant_100" [id=112, type=Constant]; -"113 Gather_101" [id=113, type=Gather]; -"114 Shape_102" [id=114, type=Shape]; -"115 Constant_103" [id=115, type=Constant]; -"116 Gather_104" [id=116, type=Gather]; -"117 Shape_105" [id=117, type=Shape]; -"118 Constant_106" [id=118, type=Constant]; -"119 Gather_107" [id=119, type=Gather]; -"120 Constant_108" [id=120, type=Constant]; -"121 Div_109" [id=121, type=Div]; -"122 Cast_110" [id=122, type=Cast]; -"123 Cast_111" [id=123, type=Cast]; -"124 Unsqueeze_112" [id=124, type=Unsqueeze]; -"125 Unsqueeze_113" [id=125, type=Unsqueeze]; -"126 Unsqueeze_114" [id=126, type=Unsqueeze]; -"127 Concat_115" [id=127, type=Concat]; -"128 Reshape_116" [id=128, type=Reshape]; -"129 Transpose_117" [id=129, type=Transpose]; -"130 Transpose_118" [id=130, type=Transpose]; -"131 Unsqueeze_119" [id=131, type=Unsqueeze]; -"132 Unsqueeze_120" [id=132, type=Unsqueeze]; -"133 Concat_121" [id=133, type=Concat]; -"134 MatMul_122" [id=134, type=MatMul]; -"135 Constant_123" [id=135, type=Constant]; -"136 Div_124" [id=136, type=Div]; -"137 Shape_125" [id=137, type=Shape]; -"138 Constant_126" [id=138, type=Constant]; -"139 Gather_127" [id=139, type=Gather]; -"140 Shape_128" [id=140, type=Shape]; -"141 Constant_129" [id=141, type=Constant]; -"142 Gather_130" [id=142, type=Gather]; -"143 Sub_131" [id=143, type=Sub]; -"144 Unsqueeze_132" [id=144, type=Unsqueeze]; -"145 Unsqueeze_133" [id=145, type=Unsqueeze]; -"146 Constant_134" [id=146, type=Constant]; -"147 Slice_135" [id=147, type=Slice]; -"148 Unsqueeze_136" [id=148, type=Unsqueeze]; -"149 Constant_137" [id=149, type=Constant]; -"150 Slice_138" [id=150, type=Slice]; -"151 Mul_139" [id=151, type=Mul]; -"152 Constant_140" [id=152, type=Constant]; -"153 Sub_141" [id=153, type=Sub]; -"154 Constant_142" [id=154, type=Constant]; -"155 Mul_143" [id=155, type=Mul]; -"156 Sub_144" [id=156, type=Sub]; -"157 Softmax_145" [id=157, type=Softmax]; -"158 MatMul_146" [id=158, type=MatMul]; -"159 QuantizeLinear_333_1" [id=159, type=QuantizeLinear]; -"160 
DequantizeLinear_333_1" [id=160, type=DequantizeLinear]; -"161 Transpose_147" [id=161, type=Transpose]; -"162 Shape_148" [id=162, type=Shape]; -"163 Constant_149" [id=163, type=Constant]; -"164 Gather_150" [id=164, type=Gather]; -"165 Shape_151" [id=165, type=Shape]; -"166 Constant_152" [id=166, type=Constant]; -"167 Gather_153" [id=167, type=Gather]; -"168 Shape_154" [id=168, type=Shape]; -"169 Constant_155" [id=169, type=Constant]; -"170 Gather_156" [id=170, type=Gather]; -"171 Shape_157" [id=171, type=Shape]; -"172 Constant_158" [id=172, type=Constant]; -"173 Gather_159" [id=173, type=Gather]; -"174 Mul_160" [id=174, type=Mul]; -"175 Unsqueeze_161" [id=175, type=Unsqueeze]; -"176 Unsqueeze_162" [id=176, type=Unsqueeze]; -"177 Unsqueeze_163" [id=177, type=Unsqueeze]; -"178 Concat_164" [id=178, type=Concat]; -"179 Reshape_165" [id=179, type=Reshape]; -"180 Shape_166" [id=180, type=Shape]; -"181 Constant_167" [id=181, type=Constant]; -"182 Gather_168" [id=182, type=Gather]; -"183 Shape_169" [id=183, type=Shape]; -"184 Constant_170" [id=184, type=Constant]; -"185 Gather_171" [id=185, type=Gather]; -"186 Shape_172" [id=186, type=Shape]; -"187 Constant_173" [id=187, type=Constant]; -"188 Gather_174" [id=188, type=Gather]; -"189 Unsqueeze_175" [id=189, type=Unsqueeze]; -"190 Concat_176" [id=190, type=Concat]; -"191 Reshape_177" [id=191, type=Reshape]; -"192 QuantizeLinear_h.0.attn.c_proj.weight_1" [id=192, type=QuantizeLinear]; -"193 DequantizeLinear_h.0.attn.c_proj.weight_1" [id=193, type=DequantizeLinear]; -"194 Gemm_178" [id=194, type=Gemm]; -"195 Unsqueeze_179" [id=195, type=Unsqueeze]; -"196 Unsqueeze_180" [id=196, type=Unsqueeze]; -"197 Concat_181" [id=197, type=Concat]; -"198 Reshape_182" [id=198, type=Reshape]; -"199 Add_183" [id=199, type=Add]; -"200 ReduceMean_184" [id=200, type=ReduceMean]; -"201 Sub_185" [id=201, type=Sub]; -"202 Constant_186" [id=202, type=Constant]; -"203 Pow_187" [id=203, type=Pow]; -"204 ReduceMean_188" [id=204, type=ReduceMean]; -"205 Constant_189" [id=205, type=Constant]; -"206 Add_190" [id=206, type=Add]; -"207 Sqrt_191" [id=207, type=Sqrt]; -"208 Div_192" [id=208, type=Div]; -"209 Mul_193" [id=209, type=Mul]; -"210 Add_194" [id=210, type=Add]; -"211 QuantizeLinear_385_1" [id=211, type=QuantizeLinear]; -"212 DequantizeLinear_385_1" [id=212, type=DequantizeLinear]; -"213 Shape_195" [id=213, type=Shape]; -"214 Constant_196" [id=214, type=Constant]; -"215 Gather_197" [id=215, type=Gather]; -"216 Shape_198" [id=216, type=Shape]; -"217 Constant_199" [id=217, type=Constant]; -"218 Gather_200" [id=218, type=Gather]; -"219 Shape_201" [id=219, type=Shape]; -"220 Constant_202" [id=220, type=Constant]; -"221 Gather_203" [id=221, type=Gather]; -"222 Unsqueeze_204" [id=222, type=Unsqueeze]; -"223 Concat_205" [id=223, type=Concat]; -"224 Reshape_206" [id=224, type=Reshape]; -"225 QuantizeLinear_h.0.mlp.c_fc.weight_1" [id=225, type=QuantizeLinear]; -"226 DequantizeLinear_h.0.mlp.c_fc.weight_1" [id=226, type=DequantizeLinear]; -"227 Gemm_207" [id=227, type=Gemm]; -"228 Unsqueeze_208" [id=228, type=Unsqueeze]; -"229 Unsqueeze_209" [id=229, type=Unsqueeze]; -"230 Concat_210" [id=230, type=Concat]; -"231 Reshape_211" [id=231, type=Reshape]; -"232 Constant_212" [id=232, type=Constant]; -"233 Mul_213" [id=233, type=Mul]; -"234 Constant_214" [id=234, type=Constant]; -"235 Pow_215" [id=235, type=Pow]; -"236 Constant_216" [id=236, type=Constant]; -"237 Mul_217" [id=237, type=Mul]; -"238 Add_218" [id=238, type=Add]; -"239 Constant_219" [id=239, type=Constant]; -"240 Mul_220" 
[id=240, type=Mul]; -"241 Tanh_221" [id=241, type=Tanh]; -"242 Constant_222" [id=242, type=Constant]; -"243 Add_223" [id=243, type=Add]; -"244 Mul_224" [id=244, type=Mul]; -"245 QuantizeLinear_419_1" [id=245, type=QuantizeLinear]; -"246 DequantizeLinear_419_1" [id=246, type=DequantizeLinear]; -"247 Shape_225" [id=247, type=Shape]; -"248 Constant_226" [id=248, type=Constant]; -"249 Gather_227" [id=249, type=Gather]; -"250 Shape_228" [id=250, type=Shape]; -"251 Constant_229" [id=251, type=Constant]; -"252 Gather_230" [id=252, type=Gather]; -"253 Shape_231" [id=253, type=Shape]; -"254 Constant_232" [id=254, type=Constant]; -"255 Gather_233" [id=255, type=Gather]; -"256 Unsqueeze_234" [id=256, type=Unsqueeze]; -"257 Concat_235" [id=257, type=Concat]; -"258 Reshape_236" [id=258, type=Reshape]; -"259 QuantizeLinear_h.0.mlp.c_proj.weight_1" [id=259, type=QuantizeLinear]; -"260 DequantizeLinear_h.0.mlp.c_proj.weight_1" [id=260, type=DequantizeLinear]; -"261 Gemm_237" [id=261, type=Gemm]; -"262 Unsqueeze_238" [id=262, type=Unsqueeze]; -"263 Unsqueeze_239" [id=263, type=Unsqueeze]; -"264 Concat_240" [id=264, type=Concat]; -"265 Reshape_241" [id=265, type=Reshape]; -"266 Add_242" [id=266, type=Add]; -"267 ReduceMean_243" [id=267, type=ReduceMean]; -"268 Sub_244" [id=268, type=Sub]; -"269 Constant_245" [id=269, type=Constant]; -"270 Pow_246" [id=270, type=Pow]; -"271 ReduceMean_247" [id=271, type=ReduceMean]; -"272 Constant_248" [id=272, type=Constant]; -"273 Add_249" [id=273, type=Add]; -"274 Sqrt_250" [id=274, type=Sqrt]; -"275 Div_251" [id=275, type=Div]; -"276 Mul_252" [id=276, type=Mul]; -"277 Add_253" [id=277, type=Add]; -"278 QuantizeLinear_452_1" [id=278, type=QuantizeLinear]; -"279 DequantizeLinear_452_1" [id=279, type=DequantizeLinear]; -"280 Shape_254" [id=280, type=Shape]; -"281 Constant_255" [id=281, type=Constant]; -"282 Gather_256" [id=282, type=Gather]; -"283 Shape_257" [id=283, type=Shape]; -"284 Constant_258" [id=284, type=Constant]; -"285 Gather_259" [id=285, type=Gather]; -"286 Shape_260" [id=286, type=Shape]; -"287 Constant_261" [id=287, type=Constant]; -"288 Gather_262" [id=288, type=Gather]; -"289 Unsqueeze_263" [id=289, type=Unsqueeze]; -"290 Concat_264" [id=290, type=Concat]; -"291 Reshape_265" [id=291, type=Reshape]; -"292 QuantizeLinear_h.1.attn.c_attn.weight_1" [id=292, type=QuantizeLinear]; -"293 DequantizeLinear_h.1.attn.c_attn.weight_1" [id=293, type=DequantizeLinear]; -"294 Gemm_266" [id=294, type=Gemm]; -"295 Unsqueeze_267" [id=295, type=Unsqueeze]; -"296 Unsqueeze_268" [id=296, type=Unsqueeze]; -"297 Concat_269" [id=297, type=Concat]; -"298 Reshape_270" [id=298, type=Reshape]; -"299 Split_271" [id=299, type=Split]; -"300 QuantizeLinear_query.3_1" [id=300, type=QuantizeLinear]; -"301 DequantizeLinear_query.3_1" [id=301, type=DequantizeLinear]; -"302 Shape_272" [id=302, type=Shape]; -"303 Constant_273" [id=303, type=Constant]; -"304 Gather_274" [id=304, type=Gather]; -"305 Shape_275" [id=305, type=Shape]; -"306 Constant_276" [id=306, type=Constant]; -"307 Gather_277" [id=307, type=Gather]; -"308 Shape_278" [id=308, type=Shape]; -"309 Constant_279" [id=309, type=Constant]; -"310 Gather_280" [id=310, type=Gather]; -"311 Constant_281" [id=311, type=Constant]; -"312 Div_282" [id=312, type=Div]; -"313 Cast_283" [id=313, type=Cast]; -"314 Cast_284" [id=314, type=Cast]; -"315 Unsqueeze_285" [id=315, type=Unsqueeze]; -"316 Unsqueeze_286" [id=316, type=Unsqueeze]; -"317 Unsqueeze_287" [id=317, type=Unsqueeze]; -"318 Concat_288" [id=318, type=Concat]; -"319 Reshape_289" [id=319, 
type=Reshape]; -"320 Transpose_290" [id=320, type=Transpose]; -"321 Shape_291" [id=321, type=Shape]; -"322 Constant_292" [id=322, type=Constant]; -"323 Gather_293" [id=323, type=Gather]; -"324 Shape_294" [id=324, type=Shape]; -"325 Constant_295" [id=325, type=Constant]; -"326 Gather_296" [id=326, type=Gather]; -"327 Shape_297" [id=327, type=Shape]; -"328 Constant_298" [id=328, type=Constant]; -"329 Gather_299" [id=329, type=Gather]; -"330 Constant_300" [id=330, type=Constant]; -"331 Div_301" [id=331, type=Div]; -"332 Cast_302" [id=332, type=Cast]; -"333 Cast_303" [id=333, type=Cast]; -"334 Unsqueeze_304" [id=334, type=Unsqueeze]; -"335 Unsqueeze_305" [id=335, type=Unsqueeze]; -"336 Unsqueeze_306" [id=336, type=Unsqueeze]; -"337 Concat_307" [id=337, type=Concat]; -"338 Reshape_308" [id=338, type=Reshape]; -"339 QuantizeLinear_517_1" [id=339, type=QuantizeLinear]; -"340 DequantizeLinear_517_1" [id=340, type=DequantizeLinear]; -"341 Transpose_309" [id=341, type=Transpose]; -"342 Shape_310" [id=342, type=Shape]; -"343 Constant_311" [id=343, type=Constant]; -"344 Gather_312" [id=344, type=Gather]; -"345 Shape_313" [id=345, type=Shape]; -"346 Constant_314" [id=346, type=Constant]; -"347 Gather_315" [id=347, type=Gather]; -"348 Shape_316" [id=348, type=Shape]; -"349 Constant_317" [id=349, type=Constant]; -"350 Gather_318" [id=350, type=Gather]; -"351 Constant_319" [id=351, type=Constant]; -"352 Div_320" [id=352, type=Div]; -"353 Cast_321" [id=353, type=Cast]; -"354 Cast_322" [id=354, type=Cast]; -"355 Unsqueeze_323" [id=355, type=Unsqueeze]; -"356 Unsqueeze_324" [id=356, type=Unsqueeze]; -"357 Unsqueeze_325" [id=357, type=Unsqueeze]; -"358 Concat_326" [id=358, type=Concat]; -"359 Reshape_327" [id=359, type=Reshape]; -"360 Transpose_328" [id=360, type=Transpose]; -"361 Transpose_329" [id=361, type=Transpose]; -"362 Unsqueeze_330" [id=362, type=Unsqueeze]; -"363 Unsqueeze_331" [id=363, type=Unsqueeze]; -"364 Concat_332" [id=364, type=Concat]; -"365 MatMul_333" [id=365, type=MatMul]; -"366 Constant_334" [id=366, type=Constant]; -"367 Div_335" [id=367, type=Div]; -"368 Shape_336" [id=368, type=Shape]; -"369 Constant_337" [id=369, type=Constant]; -"370 Gather_338" [id=370, type=Gather]; -"371 Shape_339" [id=371, type=Shape]; -"372 Constant_340" [id=372, type=Constant]; -"373 Gather_341" [id=373, type=Gather]; -"374 Sub_342" [id=374, type=Sub]; -"375 Unsqueeze_343" [id=375, type=Unsqueeze]; -"376 Unsqueeze_344" [id=376, type=Unsqueeze]; -"377 Constant_345" [id=377, type=Constant]; -"378 Slice_346" [id=378, type=Slice]; -"379 Unsqueeze_347" [id=379, type=Unsqueeze]; -"380 Constant_348" [id=380, type=Constant]; -"381 Slice_349" [id=381, type=Slice]; -"382 Mul_350" [id=382, type=Mul]; -"383 Constant_351" [id=383, type=Constant]; -"384 Sub_352" [id=384, type=Sub]; -"385 Constant_353" [id=385, type=Constant]; -"386 Mul_354" [id=386, type=Mul]; -"387 Sub_355" [id=387, type=Sub]; -"388 Softmax_356" [id=388, type=Softmax]; -"389 MatMul_357" [id=389, type=MatMul]; -"390 QuantizeLinear_574_1" [id=390, type=QuantizeLinear]; -"391 DequantizeLinear_574_1" [id=391, type=DequantizeLinear]; -"392 Transpose_358" [id=392, type=Transpose]; -"393 Shape_359" [id=393, type=Shape]; -"394 Constant_360" [id=394, type=Constant]; -"395 Gather_361" [id=395, type=Gather]; -"396 Shape_362" [id=396, type=Shape]; -"397 Constant_363" [id=397, type=Constant]; -"398 Gather_364" [id=398, type=Gather]; -"399 Shape_365" [id=399, type=Shape]; -"400 Constant_366" [id=400, type=Constant]; -"401 Gather_367" [id=401, type=Gather]; -"402 
Shape_368" [id=402, type=Shape]; -"403 Constant_369" [id=403, type=Constant]; -"404 Gather_370" [id=404, type=Gather]; -"405 Mul_371" [id=405, type=Mul]; -"406 Unsqueeze_372" [id=406, type=Unsqueeze]; -"407 Unsqueeze_373" [id=407, type=Unsqueeze]; -"408 Unsqueeze_374" [id=408, type=Unsqueeze]; -"409 Concat_375" [id=409, type=Concat]; -"410 Reshape_376" [id=410, type=Reshape]; -"411 Shape_377" [id=411, type=Shape]; -"412 Constant_378" [id=412, type=Constant]; -"413 Gather_379" [id=413, type=Gather]; -"414 Shape_380" [id=414, type=Shape]; -"415 Constant_381" [id=415, type=Constant]; -"416 Gather_382" [id=416, type=Gather]; -"417 Shape_383" [id=417, type=Shape]; -"418 Constant_384" [id=418, type=Constant]; -"419 Gather_385" [id=419, type=Gather]; -"420 Unsqueeze_386" [id=420, type=Unsqueeze]; -"421 Concat_387" [id=421, type=Concat]; -"422 Reshape_388" [id=422, type=Reshape]; -"423 QuantizeLinear_h.1.attn.c_proj.weight_1" [id=423, type=QuantizeLinear]; -"424 DequantizeLinear_h.1.attn.c_proj.weight_1" [id=424, type=DequantizeLinear]; -"425 Gemm_389" [id=425, type=Gemm]; -"426 Unsqueeze_390" [id=426, type=Unsqueeze]; -"427 Unsqueeze_391" [id=427, type=Unsqueeze]; -"428 Concat_392" [id=428, type=Concat]; -"429 Reshape_393" [id=429, type=Reshape]; -"430 Add_394" [id=430, type=Add]; -"431 ReduceMean_395" [id=431, type=ReduceMean]; -"432 Sub_396" [id=432, type=Sub]; -"433 Constant_397" [id=433, type=Constant]; -"434 Pow_398" [id=434, type=Pow]; -"435 ReduceMean_399" [id=435, type=ReduceMean]; -"436 Constant_400" [id=436, type=Constant]; -"437 Add_401" [id=437, type=Add]; -"438 Sqrt_402" [id=438, type=Sqrt]; -"439 Div_403" [id=439, type=Div]; -"440 Mul_404" [id=440, type=Mul]; -"441 Add_405" [id=441, type=Add]; -"442 QuantizeLinear_626_1" [id=442, type=QuantizeLinear]; -"443 DequantizeLinear_626_1" [id=443, type=DequantizeLinear]; -"444 Shape_406" [id=444, type=Shape]; -"445 Constant_407" [id=445, type=Constant]; -"446 Gather_408" [id=446, type=Gather]; -"447 Shape_409" [id=447, type=Shape]; -"448 Constant_410" [id=448, type=Constant]; -"449 Gather_411" [id=449, type=Gather]; -"450 Shape_412" [id=450, type=Shape]; -"451 Constant_413" [id=451, type=Constant]; -"452 Gather_414" [id=452, type=Gather]; -"453 Unsqueeze_415" [id=453, type=Unsqueeze]; -"454 Concat_416" [id=454, type=Concat]; -"455 Reshape_417" [id=455, type=Reshape]; -"456 QuantizeLinear_h.1.mlp.c_fc.weight_1" [id=456, type=QuantizeLinear]; -"457 DequantizeLinear_h.1.mlp.c_fc.weight_1" [id=457, type=DequantizeLinear]; -"458 Gemm_418" [id=458, type=Gemm]; -"459 Unsqueeze_419" [id=459, type=Unsqueeze]; -"460 Unsqueeze_420" [id=460, type=Unsqueeze]; -"461 Concat_421" [id=461, type=Concat]; -"462 Reshape_422" [id=462, type=Reshape]; -"463 Constant_423" [id=463, type=Constant]; -"464 Mul_424" [id=464, type=Mul]; -"465 Constant_425" [id=465, type=Constant]; -"466 Pow_426" [id=466, type=Pow]; -"467 Constant_427" [id=467, type=Constant]; -"468 Mul_428" [id=468, type=Mul]; -"469 Add_429" [id=469, type=Add]; -"470 Constant_430" [id=470, type=Constant]; -"471 Mul_431" [id=471, type=Mul]; -"472 Tanh_432" [id=472, type=Tanh]; -"473 Constant_433" [id=473, type=Constant]; -"474 Add_434" [id=474, type=Add]; -"475 Mul_435" [id=475, type=Mul]; -"476 QuantizeLinear_660_1" [id=476, type=QuantizeLinear]; -"477 DequantizeLinear_660_1" [id=477, type=DequantizeLinear]; -"478 Shape_436" [id=478, type=Shape]; -"479 Constant_437" [id=479, type=Constant]; -"480 Gather_438" [id=480, type=Gather]; -"481 Shape_439" [id=481, type=Shape]; -"482 Constant_440" [id=482, 
type=Constant]; -"483 Gather_441" [id=483, type=Gather]; -"484 Shape_442" [id=484, type=Shape]; -"485 Constant_443" [id=485, type=Constant]; -"486 Gather_444" [id=486, type=Gather]; -"487 Unsqueeze_445" [id=487, type=Unsqueeze]; -"488 Concat_446" [id=488, type=Concat]; -"489 Reshape_447" [id=489, type=Reshape]; -"490 QuantizeLinear_h.1.mlp.c_proj.weight_1" [id=490, type=QuantizeLinear]; -"491 DequantizeLinear_h.1.mlp.c_proj.weight_1" [id=491, type=DequantizeLinear]; -"492 Gemm_448" [id=492, type=Gemm]; -"493 Unsqueeze_449" [id=493, type=Unsqueeze]; -"494 Unsqueeze_450" [id=494, type=Unsqueeze]; -"495 Concat_451" [id=495, type=Concat]; -"496 Reshape_452" [id=496, type=Reshape]; -"497 Add_453" [id=497, type=Add]; -"498 ReduceMean_454" [id=498, type=ReduceMean]; -"499 Sub_455" [id=499, type=Sub]; -"500 Constant_456" [id=500, type=Constant]; -"501 Pow_457" [id=501, type=Pow]; -"502 ReduceMean_458" [id=502, type=ReduceMean]; -"503 Constant_459" [id=503, type=Constant]; -"504 Add_460" [id=504, type=Add]; -"505 Sqrt_461" [id=505, type=Sqrt]; -"506 Div_462" [id=506, type=Div]; -"507 Mul_463" [id=507, type=Mul]; -"508 Add_464" [id=508, type=Add]; -"509 QuantizeLinear_693_1" [id=509, type=QuantizeLinear]; -"510 DequantizeLinear_693_1" [id=510, type=DequantizeLinear]; -"511 Shape_465" [id=511, type=Shape]; -"512 Constant_466" [id=512, type=Constant]; -"513 Gather_467" [id=513, type=Gather]; -"514 Shape_468" [id=514, type=Shape]; -"515 Constant_469" [id=515, type=Constant]; -"516 Gather_470" [id=516, type=Gather]; -"517 Shape_471" [id=517, type=Shape]; -"518 Constant_472" [id=518, type=Constant]; -"519 Gather_473" [id=519, type=Gather]; -"520 Unsqueeze_474" [id=520, type=Unsqueeze]; -"521 Concat_475" [id=521, type=Concat]; -"522 Reshape_476" [id=522, type=Reshape]; -"523 QuantizeLinear_h.2.attn.c_attn.weight_1" [id=523, type=QuantizeLinear]; -"524 DequantizeLinear_h.2.attn.c_attn.weight_1" [id=524, type=DequantizeLinear]; -"525 Gemm_477" [id=525, type=Gemm]; -"526 Unsqueeze_478" [id=526, type=Unsqueeze]; -"527 Unsqueeze_479" [id=527, type=Unsqueeze]; -"528 Concat_480" [id=528, type=Concat]; -"529 Reshape_481" [id=529, type=Reshape]; -"530 Split_482" [id=530, type=Split]; -"531 QuantizeLinear_query.5_1" [id=531, type=QuantizeLinear]; -"532 DequantizeLinear_query.5_1" [id=532, type=DequantizeLinear]; -"533 Shape_483" [id=533, type=Shape]; -"534 Constant_484" [id=534, type=Constant]; -"535 Gather_485" [id=535, type=Gather]; -"536 Shape_486" [id=536, type=Shape]; -"537 Constant_487" [id=537, type=Constant]; -"538 Gather_488" [id=538, type=Gather]; -"539 Shape_489" [id=539, type=Shape]; -"540 Constant_490" [id=540, type=Constant]; -"541 Gather_491" [id=541, type=Gather]; -"542 Constant_492" [id=542, type=Constant]; -"543 Div_493" [id=543, type=Div]; -"544 Cast_494" [id=544, type=Cast]; -"545 Cast_495" [id=545, type=Cast]; -"546 Unsqueeze_496" [id=546, type=Unsqueeze]; -"547 Unsqueeze_497" [id=547, type=Unsqueeze]; -"548 Unsqueeze_498" [id=548, type=Unsqueeze]; -"549 Concat_499" [id=549, type=Concat]; -"550 Reshape_500" [id=550, type=Reshape]; -"551 Transpose_501" [id=551, type=Transpose]; -"552 Shape_502" [id=552, type=Shape]; -"553 Constant_503" [id=553, type=Constant]; -"554 Gather_504" [id=554, type=Gather]; -"555 Shape_505" [id=555, type=Shape]; -"556 Constant_506" [id=556, type=Constant]; -"557 Gather_507" [id=557, type=Gather]; -"558 Shape_508" [id=558, type=Shape]; -"559 Constant_509" [id=559, type=Constant]; -"560 Gather_510" [id=560, type=Gather]; -"561 Constant_511" [id=561, type=Constant]; -"562 
Div_512" [id=562, type=Div]; -"563 Cast_513" [id=563, type=Cast]; -"564 Cast_514" [id=564, type=Cast]; -"565 Unsqueeze_515" [id=565, type=Unsqueeze]; -"566 Unsqueeze_516" [id=566, type=Unsqueeze]; -"567 Unsqueeze_517" [id=567, type=Unsqueeze]; -"568 Concat_518" [id=568, type=Concat]; -"569 Reshape_519" [id=569, type=Reshape]; -"570 QuantizeLinear_758_1" [id=570, type=QuantizeLinear]; -"571 DequantizeLinear_758_1" [id=571, type=DequantizeLinear]; -"572 Transpose_520" [id=572, type=Transpose]; -"573 Shape_521" [id=573, type=Shape]; -"574 Constant_522" [id=574, type=Constant]; -"575 Gather_523" [id=575, type=Gather]; -"576 Shape_524" [id=576, type=Shape]; -"577 Constant_525" [id=577, type=Constant]; -"578 Gather_526" [id=578, type=Gather]; -"579 Shape_527" [id=579, type=Shape]; -"580 Constant_528" [id=580, type=Constant]; -"581 Gather_529" [id=581, type=Gather]; -"582 Constant_530" [id=582, type=Constant]; -"583 Div_531" [id=583, type=Div]; -"584 Cast_532" [id=584, type=Cast]; -"585 Cast_533" [id=585, type=Cast]; -"586 Unsqueeze_534" [id=586, type=Unsqueeze]; -"587 Unsqueeze_535" [id=587, type=Unsqueeze]; -"588 Unsqueeze_536" [id=588, type=Unsqueeze]; -"589 Concat_537" [id=589, type=Concat]; -"590 Reshape_538" [id=590, type=Reshape]; -"591 Transpose_539" [id=591, type=Transpose]; -"592 Transpose_540" [id=592, type=Transpose]; -"593 Unsqueeze_541" [id=593, type=Unsqueeze]; -"594 Unsqueeze_542" [id=594, type=Unsqueeze]; -"595 Concat_543" [id=595, type=Concat]; -"596 MatMul_544" [id=596, type=MatMul]; -"597 Constant_545" [id=597, type=Constant]; -"598 Div_546" [id=598, type=Div]; -"599 Shape_547" [id=599, type=Shape]; -"600 Constant_548" [id=600, type=Constant]; -"601 Gather_549" [id=601, type=Gather]; -"602 Shape_550" [id=602, type=Shape]; -"603 Constant_551" [id=603, type=Constant]; -"604 Gather_552" [id=604, type=Gather]; -"605 Sub_553" [id=605, type=Sub]; -"606 Unsqueeze_554" [id=606, type=Unsqueeze]; -"607 Unsqueeze_555" [id=607, type=Unsqueeze]; -"608 Constant_556" [id=608, type=Constant]; -"609 Slice_557" [id=609, type=Slice]; -"610 Unsqueeze_558" [id=610, type=Unsqueeze]; -"611 Constant_559" [id=611, type=Constant]; -"612 Slice_560" [id=612, type=Slice]; -"613 Mul_561" [id=613, type=Mul]; -"614 Constant_562" [id=614, type=Constant]; -"615 Sub_563" [id=615, type=Sub]; -"616 Constant_564" [id=616, type=Constant]; -"617 Mul_565" [id=617, type=Mul]; -"618 Sub_566" [id=618, type=Sub]; -"619 Softmax_567" [id=619, type=Softmax]; -"620 MatMul_568" [id=620, type=MatMul]; -"621 QuantizeLinear_815_1" [id=621, type=QuantizeLinear]; -"622 DequantizeLinear_815_1" [id=622, type=DequantizeLinear]; -"623 Transpose_569" [id=623, type=Transpose]; -"624 Shape_570" [id=624, type=Shape]; -"625 Constant_571" [id=625, type=Constant]; -"626 Gather_572" [id=626, type=Gather]; -"627 Shape_573" [id=627, type=Shape]; -"628 Constant_574" [id=628, type=Constant]; -"629 Gather_575" [id=629, type=Gather]; -"630 Shape_576" [id=630, type=Shape]; -"631 Constant_577" [id=631, type=Constant]; -"632 Gather_578" [id=632, type=Gather]; -"633 Shape_579" [id=633, type=Shape]; -"634 Constant_580" [id=634, type=Constant]; -"635 Gather_581" [id=635, type=Gather]; -"636 Mul_582" [id=636, type=Mul]; -"637 Unsqueeze_583" [id=637, type=Unsqueeze]; -"638 Unsqueeze_584" [id=638, type=Unsqueeze]; -"639 Unsqueeze_585" [id=639, type=Unsqueeze]; -"640 Concat_586" [id=640, type=Concat]; -"641 Reshape_587" [id=641, type=Reshape]; -"642 Shape_588" [id=642, type=Shape]; -"643 Constant_589" [id=643, type=Constant]; -"644 Gather_590" [id=644, 
type=Gather]; -"645 Shape_591" [id=645, type=Shape]; -"646 Constant_592" [id=646, type=Constant]; -"647 Gather_593" [id=647, type=Gather]; -"648 Shape_594" [id=648, type=Shape]; -"649 Constant_595" [id=649, type=Constant]; -"650 Gather_596" [id=650, type=Gather]; -"651 Unsqueeze_597" [id=651, type=Unsqueeze]; -"652 Concat_598" [id=652, type=Concat]; -"653 Reshape_599" [id=653, type=Reshape]; -"654 QuantizeLinear_h.2.attn.c_proj.weight_1" [id=654, type=QuantizeLinear]; -"655 DequantizeLinear_h.2.attn.c_proj.weight_1" [id=655, type=DequantizeLinear]; -"656 Gemm_600" [id=656, type=Gemm]; -"657 Unsqueeze_601" [id=657, type=Unsqueeze]; -"658 Unsqueeze_602" [id=658, type=Unsqueeze]; -"659 Concat_603" [id=659, type=Concat]; -"660 Reshape_604" [id=660, type=Reshape]; -"661 Add_605" [id=661, type=Add]; -"662 ReduceMean_606" [id=662, type=ReduceMean]; -"663 Sub_607" [id=663, type=Sub]; -"664 Constant_608" [id=664, type=Constant]; -"665 Pow_609" [id=665, type=Pow]; -"666 ReduceMean_610" [id=666, type=ReduceMean]; -"667 Constant_611" [id=667, type=Constant]; -"668 Add_612" [id=668, type=Add]; -"669 Sqrt_613" [id=669, type=Sqrt]; -"670 Div_614" [id=670, type=Div]; -"671 Mul_615" [id=671, type=Mul]; -"672 Add_616" [id=672, type=Add]; -"673 QuantizeLinear_867_1" [id=673, type=QuantizeLinear]; -"674 DequantizeLinear_867_1" [id=674, type=DequantizeLinear]; -"675 Shape_617" [id=675, type=Shape]; -"676 Constant_618" [id=676, type=Constant]; -"677 Gather_619" [id=677, type=Gather]; -"678 Shape_620" [id=678, type=Shape]; -"679 Constant_621" [id=679, type=Constant]; -"680 Gather_622" [id=680, type=Gather]; -"681 Shape_623" [id=681, type=Shape]; -"682 Constant_624" [id=682, type=Constant]; -"683 Gather_625" [id=683, type=Gather]; -"684 Unsqueeze_626" [id=684, type=Unsqueeze]; -"685 Concat_627" [id=685, type=Concat]; -"686 Reshape_628" [id=686, type=Reshape]; -"687 QuantizeLinear_h.2.mlp.c_fc.weight_1" [id=687, type=QuantizeLinear]; -"688 DequantizeLinear_h.2.mlp.c_fc.weight_1" [id=688, type=DequantizeLinear]; -"689 Gemm_629" [id=689, type=Gemm]; -"690 Unsqueeze_630" [id=690, type=Unsqueeze]; -"691 Unsqueeze_631" [id=691, type=Unsqueeze]; -"692 Concat_632" [id=692, type=Concat]; -"693 Reshape_633" [id=693, type=Reshape]; -"694 Constant_634" [id=694, type=Constant]; -"695 Mul_635" [id=695, type=Mul]; -"696 Constant_636" [id=696, type=Constant]; -"697 Pow_637" [id=697, type=Pow]; -"698 Constant_638" [id=698, type=Constant]; -"699 Mul_639" [id=699, type=Mul]; -"700 Add_640" [id=700, type=Add]; -"701 Constant_641" [id=701, type=Constant]; -"702 Mul_642" [id=702, type=Mul]; -"703 Tanh_643" [id=703, type=Tanh]; -"704 Constant_644" [id=704, type=Constant]; -"705 Add_645" [id=705, type=Add]; -"706 Mul_646" [id=706, type=Mul]; -"707 QuantizeLinear_901_1" [id=707, type=QuantizeLinear]; -"708 DequantizeLinear_901_1" [id=708, type=DequantizeLinear]; -"709 Shape_647" [id=709, type=Shape]; -"710 Constant_648" [id=710, type=Constant]; -"711 Gather_649" [id=711, type=Gather]; -"712 Shape_650" [id=712, type=Shape]; -"713 Constant_651" [id=713, type=Constant]; -"714 Gather_652" [id=714, type=Gather]; -"715 Shape_653" [id=715, type=Shape]; -"716 Constant_654" [id=716, type=Constant]; -"717 Gather_655" [id=717, type=Gather]; -"718 Unsqueeze_656" [id=718, type=Unsqueeze]; -"719 Concat_657" [id=719, type=Concat]; -"720 Reshape_658" [id=720, type=Reshape]; -"721 QuantizeLinear_h.2.mlp.c_proj.weight_1" [id=721, type=QuantizeLinear]; -"722 DequantizeLinear_h.2.mlp.c_proj.weight_1" [id=722, type=DequantizeLinear]; -"723 Gemm_659" [id=723, 
type=Gemm]; -"724 Unsqueeze_660" [id=724, type=Unsqueeze]; -"725 Unsqueeze_661" [id=725, type=Unsqueeze]; -"726 Concat_662" [id=726, type=Concat]; -"727 Reshape_663" [id=727, type=Reshape]; -"728 Add_664" [id=728, type=Add]; -"729 ReduceMean_665" [id=729, type=ReduceMean]; -"730 Sub_666" [id=730, type=Sub]; -"731 Constant_667" [id=731, type=Constant]; -"732 Pow_668" [id=732, type=Pow]; -"733 ReduceMean_669" [id=733, type=ReduceMean]; -"734 Constant_670" [id=734, type=Constant]; -"735 Add_671" [id=735, type=Add]; -"736 Sqrt_672" [id=736, type=Sqrt]; -"737 Div_673" [id=737, type=Div]; -"738 Mul_674" [id=738, type=Mul]; -"739 Add_675" [id=739, type=Add]; -"740 QuantizeLinear_934_1" [id=740, type=QuantizeLinear]; -"741 DequantizeLinear_934_1" [id=741, type=DequantizeLinear]; -"742 Shape_676" [id=742, type=Shape]; -"743 Constant_677" [id=743, type=Constant]; -"744 Gather_678" [id=744, type=Gather]; -"745 Shape_679" [id=745, type=Shape]; -"746 Constant_680" [id=746, type=Constant]; -"747 Gather_681" [id=747, type=Gather]; -"748 Shape_682" [id=748, type=Shape]; -"749 Constant_683" [id=749, type=Constant]; -"750 Gather_684" [id=750, type=Gather]; -"751 Unsqueeze_685" [id=751, type=Unsqueeze]; -"752 Concat_686" [id=752, type=Concat]; -"753 Reshape_687" [id=753, type=Reshape]; -"754 QuantizeLinear_h.3.attn.c_attn.weight_1" [id=754, type=QuantizeLinear]; -"755 DequantizeLinear_h.3.attn.c_attn.weight_1" [id=755, type=DequantizeLinear]; -"756 Gemm_688" [id=756, type=Gemm]; -"757 Unsqueeze_689" [id=757, type=Unsqueeze]; -"758 Unsqueeze_690" [id=758, type=Unsqueeze]; -"759 Concat_691" [id=759, type=Concat]; -"760 Reshape_692" [id=760, type=Reshape]; -"761 Split_693" [id=761, type=Split]; -"762 QuantizeLinear_query.7_1" [id=762, type=QuantizeLinear]; -"763 DequantizeLinear_query.7_1" [id=763, type=DequantizeLinear]; -"764 Shape_694" [id=764, type=Shape]; -"765 Constant_695" [id=765, type=Constant]; -"766 Gather_696" [id=766, type=Gather]; -"767 Shape_697" [id=767, type=Shape]; -"768 Constant_698" [id=768, type=Constant]; -"769 Gather_699" [id=769, type=Gather]; -"770 Shape_700" [id=770, type=Shape]; -"771 Constant_701" [id=771, type=Constant]; -"772 Gather_702" [id=772, type=Gather]; -"773 Constant_703" [id=773, type=Constant]; -"774 Div_704" [id=774, type=Div]; -"775 Cast_705" [id=775, type=Cast]; -"776 Cast_706" [id=776, type=Cast]; -"777 Unsqueeze_707" [id=777, type=Unsqueeze]; -"778 Unsqueeze_708" [id=778, type=Unsqueeze]; -"779 Unsqueeze_709" [id=779, type=Unsqueeze]; -"780 Concat_710" [id=780, type=Concat]; -"781 Reshape_711" [id=781, type=Reshape]; -"782 Transpose_712" [id=782, type=Transpose]; -"783 Shape_713" [id=783, type=Shape]; -"784 Constant_714" [id=784, type=Constant]; -"785 Gather_715" [id=785, type=Gather]; -"786 Shape_716" [id=786, type=Shape]; -"787 Constant_717" [id=787, type=Constant]; -"788 Gather_718" [id=788, type=Gather]; -"789 Shape_719" [id=789, type=Shape]; -"790 Constant_720" [id=790, type=Constant]; -"791 Gather_721" [id=791, type=Gather]; -"792 Constant_722" [id=792, type=Constant]; -"793 Div_723" [id=793, type=Div]; -"794 Cast_724" [id=794, type=Cast]; -"795 Cast_725" [id=795, type=Cast]; -"796 Unsqueeze_726" [id=796, type=Unsqueeze]; -"797 Unsqueeze_727" [id=797, type=Unsqueeze]; -"798 Unsqueeze_728" [id=798, type=Unsqueeze]; -"799 Concat_729" [id=799, type=Concat]; -"800 Reshape_730" [id=800, type=Reshape]; -"801 QuantizeLinear_999_1" [id=801, type=QuantizeLinear]; -"802 DequantizeLinear_999_1" [id=802, type=DequantizeLinear]; -"803 Transpose_731" [id=803, 
type=Transpose]; -"804 Shape_732" [id=804, type=Shape]; -"805 Constant_733" [id=805, type=Constant]; -"806 Gather_734" [id=806, type=Gather]; -"807 Shape_735" [id=807, type=Shape]; -"808 Constant_736" [id=808, type=Constant]; -"809 Gather_737" [id=809, type=Gather]; -"810 Shape_738" [id=810, type=Shape]; -"811 Constant_739" [id=811, type=Constant]; -"812 Gather_740" [id=812, type=Gather]; -"813 Constant_741" [id=813, type=Constant]; -"814 Div_742" [id=814, type=Div]; -"815 Cast_743" [id=815, type=Cast]; -"816 Cast_744" [id=816, type=Cast]; -"817 Unsqueeze_745" [id=817, type=Unsqueeze]; -"818 Unsqueeze_746" [id=818, type=Unsqueeze]; -"819 Unsqueeze_747" [id=819, type=Unsqueeze]; -"820 Concat_748" [id=820, type=Concat]; -"821 Reshape_749" [id=821, type=Reshape]; -"822 Transpose_750" [id=822, type=Transpose]; -"823 Transpose_751" [id=823, type=Transpose]; -"824 Unsqueeze_752" [id=824, type=Unsqueeze]; -"825 Unsqueeze_753" [id=825, type=Unsqueeze]; -"826 Concat_754" [id=826, type=Concat]; -"827 MatMul_755" [id=827, type=MatMul]; -"828 Constant_756" [id=828, type=Constant]; -"829 Div_757" [id=829, type=Div]; -"830 Shape_758" [id=830, type=Shape]; -"831 Constant_759" [id=831, type=Constant]; -"832 Gather_760" [id=832, type=Gather]; -"833 Shape_761" [id=833, type=Shape]; -"834 Constant_762" [id=834, type=Constant]; -"835 Gather_763" [id=835, type=Gather]; -"836 Sub_764" [id=836, type=Sub]; -"837 Unsqueeze_765" [id=837, type=Unsqueeze]; -"838 Unsqueeze_766" [id=838, type=Unsqueeze]; -"839 Constant_767" [id=839, type=Constant]; -"840 Slice_768" [id=840, type=Slice]; -"841 Unsqueeze_769" [id=841, type=Unsqueeze]; -"842 Constant_770" [id=842, type=Constant]; -"843 Slice_771" [id=843, type=Slice]; -"844 Mul_772" [id=844, type=Mul]; -"845 Constant_773" [id=845, type=Constant]; -"846 Sub_774" [id=846, type=Sub]; -"847 Constant_775" [id=847, type=Constant]; -"848 Mul_776" [id=848, type=Mul]; -"849 Sub_777" [id=849, type=Sub]; -"850 Softmax_778" [id=850, type=Softmax]; -"851 MatMul_779" [id=851, type=MatMul]; -"852 QuantizeLinear_1056_1" [id=852, type=QuantizeLinear]; -"853 DequantizeLinear_1056_1" [id=853, type=DequantizeLinear]; -"854 Transpose_780" [id=854, type=Transpose]; -"855 Shape_781" [id=855, type=Shape]; -"856 Constant_782" [id=856, type=Constant]; -"857 Gather_783" [id=857, type=Gather]; -"858 Shape_784" [id=858, type=Shape]; -"859 Constant_785" [id=859, type=Constant]; -"860 Gather_786" [id=860, type=Gather]; -"861 Shape_787" [id=861, type=Shape]; -"862 Constant_788" [id=862, type=Constant]; -"863 Gather_789" [id=863, type=Gather]; -"864 Shape_790" [id=864, type=Shape]; -"865 Constant_791" [id=865, type=Constant]; -"866 Gather_792" [id=866, type=Gather]; -"867 Mul_793" [id=867, type=Mul]; -"868 Unsqueeze_794" [id=868, type=Unsqueeze]; -"869 Unsqueeze_795" [id=869, type=Unsqueeze]; -"870 Unsqueeze_796" [id=870, type=Unsqueeze]; -"871 Concat_797" [id=871, type=Concat]; -"872 Reshape_798" [id=872, type=Reshape]; -"873 Shape_799" [id=873, type=Shape]; -"874 Constant_800" [id=874, type=Constant]; -"875 Gather_801" [id=875, type=Gather]; -"876 Shape_802" [id=876, type=Shape]; -"877 Constant_803" [id=877, type=Constant]; -"878 Gather_804" [id=878, type=Gather]; -"879 Shape_805" [id=879, type=Shape]; -"880 Constant_806" [id=880, type=Constant]; -"881 Gather_807" [id=881, type=Gather]; -"882 Unsqueeze_808" [id=882, type=Unsqueeze]; -"883 Concat_809" [id=883, type=Concat]; -"884 Reshape_810" [id=884, type=Reshape]; -"885 QuantizeLinear_h.3.attn.c_proj.weight_1" [id=885, type=QuantizeLinear]; -"886 
DequantizeLinear_h.3.attn.c_proj.weight_1" [id=886, type=DequantizeLinear]; -"887 Gemm_811" [id=887, type=Gemm]; -"888 Unsqueeze_812" [id=888, type=Unsqueeze]; -"889 Unsqueeze_813" [id=889, type=Unsqueeze]; -"890 Concat_814" [id=890, type=Concat]; -"891 Reshape_815" [id=891, type=Reshape]; -"892 Add_816" [id=892, type=Add]; -"893 ReduceMean_817" [id=893, type=ReduceMean]; -"894 Sub_818" [id=894, type=Sub]; -"895 Constant_819" [id=895, type=Constant]; -"896 Pow_820" [id=896, type=Pow]; -"897 ReduceMean_821" [id=897, type=ReduceMean]; -"898 Constant_822" [id=898, type=Constant]; -"899 Add_823" [id=899, type=Add]; -"900 Sqrt_824" [id=900, type=Sqrt]; -"901 Div_825" [id=901, type=Div]; -"902 Mul_826" [id=902, type=Mul]; -"903 Add_827" [id=903, type=Add]; -"904 QuantizeLinear_1108_1" [id=904, type=QuantizeLinear]; -"905 DequantizeLinear_1108_1" [id=905, type=DequantizeLinear]; -"906 Shape_828" [id=906, type=Shape]; -"907 Constant_829" [id=907, type=Constant]; -"908 Gather_830" [id=908, type=Gather]; -"909 Shape_831" [id=909, type=Shape]; -"910 Constant_832" [id=910, type=Constant]; -"911 Gather_833" [id=911, type=Gather]; -"912 Shape_834" [id=912, type=Shape]; -"913 Constant_835" [id=913, type=Constant]; -"914 Gather_836" [id=914, type=Gather]; -"915 Unsqueeze_837" [id=915, type=Unsqueeze]; -"916 Concat_838" [id=916, type=Concat]; -"917 Reshape_839" [id=917, type=Reshape]; -"918 QuantizeLinear_h.3.mlp.c_fc.weight_1" [id=918, type=QuantizeLinear]; -"919 DequantizeLinear_h.3.mlp.c_fc.weight_1" [id=919, type=DequantizeLinear]; -"920 Gemm_840" [id=920, type=Gemm]; -"921 Unsqueeze_841" [id=921, type=Unsqueeze]; -"922 Unsqueeze_842" [id=922, type=Unsqueeze]; -"923 Concat_843" [id=923, type=Concat]; -"924 Reshape_844" [id=924, type=Reshape]; -"925 Constant_845" [id=925, type=Constant]; -"926 Mul_846" [id=926, type=Mul]; -"927 Constant_847" [id=927, type=Constant]; -"928 Pow_848" [id=928, type=Pow]; -"929 Constant_849" [id=929, type=Constant]; -"930 Mul_850" [id=930, type=Mul]; -"931 Add_851" [id=931, type=Add]; -"932 Constant_852" [id=932, type=Constant]; -"933 Mul_853" [id=933, type=Mul]; -"934 Tanh_854" [id=934, type=Tanh]; -"935 Constant_855" [id=935, type=Constant]; -"936 Add_856" [id=936, type=Add]; -"937 Mul_857" [id=937, type=Mul]; -"938 QuantizeLinear_1142_1" [id=938, type=QuantizeLinear]; -"939 DequantizeLinear_1142_1" [id=939, type=DequantizeLinear]; -"940 Shape_858" [id=940, type=Shape]; -"941 Constant_859" [id=941, type=Constant]; -"942 Gather_860" [id=942, type=Gather]; -"943 Shape_861" [id=943, type=Shape]; -"944 Constant_862" [id=944, type=Constant]; -"945 Gather_863" [id=945, type=Gather]; -"946 Shape_864" [id=946, type=Shape]; -"947 Constant_865" [id=947, type=Constant]; -"948 Gather_866" [id=948, type=Gather]; -"949 Unsqueeze_867" [id=949, type=Unsqueeze]; -"950 Concat_868" [id=950, type=Concat]; -"951 Reshape_869" [id=951, type=Reshape]; -"952 QuantizeLinear_h.3.mlp.c_proj.weight_1" [id=952, type=QuantizeLinear]; -"953 DequantizeLinear_h.3.mlp.c_proj.weight_1" [id=953, type=DequantizeLinear]; -"954 Gemm_870" [id=954, type=Gemm]; -"955 Unsqueeze_871" [id=955, type=Unsqueeze]; -"956 Unsqueeze_872" [id=956, type=Unsqueeze]; -"957 Concat_873" [id=957, type=Concat]; -"958 Reshape_874" [id=958, type=Reshape]; -"959 Add_875" [id=959, type=Add]; -"960 ReduceMean_876" [id=960, type=ReduceMean]; -"961 Sub_877" [id=961, type=Sub]; -"962 Constant_878" [id=962, type=Constant]; -"963 Pow_879" [id=963, type=Pow]; -"964 ReduceMean_880" [id=964, type=ReduceMean]; -"965 Constant_881" [id=965, 
type=Constant]; -"966 Add_882" [id=966, type=Add]; -"967 Sqrt_883" [id=967, type=Sqrt]; -"968 Div_884" [id=968, type=Div]; -"969 Mul_885" [id=969, type=Mul]; -"970 Add_886" [id=970, type=Add]; -"971 QuantizeLinear_1175_1" [id=971, type=QuantizeLinear]; -"972 DequantizeLinear_1175_1" [id=972, type=DequantizeLinear]; -"973 Shape_887" [id=973, type=Shape]; -"974 Constant_888" [id=974, type=Constant]; -"975 Gather_889" [id=975, type=Gather]; -"976 Shape_890" [id=976, type=Shape]; -"977 Constant_891" [id=977, type=Constant]; -"978 Gather_892" [id=978, type=Gather]; -"979 Shape_893" [id=979, type=Shape]; -"980 Constant_894" [id=980, type=Constant]; -"981 Gather_895" [id=981, type=Gather]; -"982 Unsqueeze_896" [id=982, type=Unsqueeze]; -"983 Concat_897" [id=983, type=Concat]; -"984 Reshape_898" [id=984, type=Reshape]; -"985 QuantizeLinear_h.4.attn.c_attn.weight_1" [id=985, type=QuantizeLinear]; -"986 DequantizeLinear_h.4.attn.c_attn.weight_1" [id=986, type=DequantizeLinear]; -"987 Gemm_899" [id=987, type=Gemm]; -"988 Unsqueeze_900" [id=988, type=Unsqueeze]; -"989 Unsqueeze_901" [id=989, type=Unsqueeze]; -"990 Concat_902" [id=990, type=Concat]; -"991 Reshape_903" [id=991, type=Reshape]; -"992 Split_904" [id=992, type=Split]; -"993 QuantizeLinear_query.9_1" [id=993, type=QuantizeLinear]; -"994 DequantizeLinear_query.9_1" [id=994, type=DequantizeLinear]; -"995 Shape_905" [id=995, type=Shape]; -"996 Constant_906" [id=996, type=Constant]; -"997 Gather_907" [id=997, type=Gather]; -"998 Shape_908" [id=998, type=Shape]; -"999 Constant_909" [id=999, type=Constant]; -"1000 Gather_910" [id=1000, type=Gather]; -"1001 Shape_911" [id=1001, type=Shape]; -"1002 Constant_912" [id=1002, type=Constant]; -"1003 Gather_913" [id=1003, type=Gather]; -"1004 Constant_914" [id=1004, type=Constant]; -"1005 Div_915" [id=1005, type=Div]; -"1006 Cast_916" [id=1006, type=Cast]; -"1007 Cast_917" [id=1007, type=Cast]; -"1008 Unsqueeze_918" [id=1008, type=Unsqueeze]; -"1009 Unsqueeze_919" [id=1009, type=Unsqueeze]; -"1010 Unsqueeze_920" [id=1010, type=Unsqueeze]; -"1011 Concat_921" [id=1011, type=Concat]; -"1012 Reshape_922" [id=1012, type=Reshape]; -"1013 Transpose_923" [id=1013, type=Transpose]; -"1014 Shape_924" [id=1014, type=Shape]; -"1015 Constant_925" [id=1015, type=Constant]; -"1016 Gather_926" [id=1016, type=Gather]; -"1017 Shape_927" [id=1017, type=Shape]; -"1018 Constant_928" [id=1018, type=Constant]; -"1019 Gather_929" [id=1019, type=Gather]; -"1020 Shape_930" [id=1020, type=Shape]; -"1021 Constant_931" [id=1021, type=Constant]; -"1022 Gather_932" [id=1022, type=Gather]; -"1023 Constant_933" [id=1023, type=Constant]; -"1024 Div_934" [id=1024, type=Div]; -"1025 Cast_935" [id=1025, type=Cast]; -"1026 Cast_936" [id=1026, type=Cast]; -"1027 Unsqueeze_937" [id=1027, type=Unsqueeze]; -"1028 Unsqueeze_938" [id=1028, type=Unsqueeze]; -"1029 Unsqueeze_939" [id=1029, type=Unsqueeze]; -"1030 Concat_940" [id=1030, type=Concat]; -"1031 Reshape_941" [id=1031, type=Reshape]; -"1032 QuantizeLinear_1240_1" [id=1032, type=QuantizeLinear]; -"1033 DequantizeLinear_1240_1" [id=1033, type=DequantizeLinear]; -"1034 Transpose_942" [id=1034, type=Transpose]; -"1035 Shape_943" [id=1035, type=Shape]; -"1036 Constant_944" [id=1036, type=Constant]; -"1037 Gather_945" [id=1037, type=Gather]; -"1038 Shape_946" [id=1038, type=Shape]; -"1039 Constant_947" [id=1039, type=Constant]; -"1040 Gather_948" [id=1040, type=Gather]; -"1041 Shape_949" [id=1041, type=Shape]; -"1042 Constant_950" [id=1042, type=Constant]; -"1043 Gather_951" [id=1043, 
type=Gather]; -"1044 Constant_952" [id=1044, type=Constant]; -"1045 Div_953" [id=1045, type=Div]; -"1046 Cast_954" [id=1046, type=Cast]; -"1047 Cast_955" [id=1047, type=Cast]; -"1048 Unsqueeze_956" [id=1048, type=Unsqueeze]; -"1049 Unsqueeze_957" [id=1049, type=Unsqueeze]; -"1050 Unsqueeze_958" [id=1050, type=Unsqueeze]; -"1051 Concat_959" [id=1051, type=Concat]; -"1052 Reshape_960" [id=1052, type=Reshape]; -"1053 Transpose_961" [id=1053, type=Transpose]; -"1054 Transpose_962" [id=1054, type=Transpose]; -"1055 Unsqueeze_963" [id=1055, type=Unsqueeze]; -"1056 Unsqueeze_964" [id=1056, type=Unsqueeze]; -"1057 Concat_965" [id=1057, type=Concat]; -"1058 MatMul_966" [id=1058, type=MatMul]; -"1059 Constant_967" [id=1059, type=Constant]; -"1060 Div_968" [id=1060, type=Div]; -"1061 Shape_969" [id=1061, type=Shape]; -"1062 Constant_970" [id=1062, type=Constant]; -"1063 Gather_971" [id=1063, type=Gather]; -"1064 Shape_972" [id=1064, type=Shape]; -"1065 Constant_973" [id=1065, type=Constant]; -"1066 Gather_974" [id=1066, type=Gather]; -"1067 Sub_975" [id=1067, type=Sub]; -"1068 Unsqueeze_976" [id=1068, type=Unsqueeze]; -"1069 Unsqueeze_977" [id=1069, type=Unsqueeze]; -"1070 Constant_978" [id=1070, type=Constant]; -"1071 Slice_979" [id=1071, type=Slice]; -"1072 Unsqueeze_980" [id=1072, type=Unsqueeze]; -"1073 Constant_981" [id=1073, type=Constant]; -"1074 Slice_982" [id=1074, type=Slice]; -"1075 Mul_983" [id=1075, type=Mul]; -"1076 Constant_984" [id=1076, type=Constant]; -"1077 Sub_985" [id=1077, type=Sub]; -"1078 Constant_986" [id=1078, type=Constant]; -"1079 Mul_987" [id=1079, type=Mul]; -"1080 Sub_988" [id=1080, type=Sub]; -"1081 Softmax_989" [id=1081, type=Softmax]; -"1082 MatMul_990" [id=1082, type=MatMul]; -"1083 QuantizeLinear_1297_1" [id=1083, type=QuantizeLinear]; -"1084 DequantizeLinear_1297_1" [id=1084, type=DequantizeLinear]; -"1085 Transpose_991" [id=1085, type=Transpose]; -"1086 Shape_992" [id=1086, type=Shape]; -"1087 Constant_993" [id=1087, type=Constant]; -"1088 Gather_994" [id=1088, type=Gather]; -"1089 Shape_995" [id=1089, type=Shape]; -"1090 Constant_996" [id=1090, type=Constant]; -"1091 Gather_997" [id=1091, type=Gather]; -"1092 Shape_998" [id=1092, type=Shape]; -"1093 Constant_999" [id=1093, type=Constant]; -"1094 Gather_1000" [id=1094, type=Gather]; -"1095 Shape_1001" [id=1095, type=Shape]; -"1096 Constant_1002" [id=1096, type=Constant]; -"1097 Gather_1003" [id=1097, type=Gather]; -"1098 Mul_1004" [id=1098, type=Mul]; -"1099 Unsqueeze_1005" [id=1099, type=Unsqueeze]; -"1100 Unsqueeze_1006" [id=1100, type=Unsqueeze]; -"1101 Unsqueeze_1007" [id=1101, type=Unsqueeze]; -"1102 Concat_1008" [id=1102, type=Concat]; -"1103 Reshape_1009" [id=1103, type=Reshape]; -"1104 Shape_1010" [id=1104, type=Shape]; -"1105 Constant_1011" [id=1105, type=Constant]; -"1106 Gather_1012" [id=1106, type=Gather]; -"1107 Shape_1013" [id=1107, type=Shape]; -"1108 Constant_1014" [id=1108, type=Constant]; -"1109 Gather_1015" [id=1109, type=Gather]; -"1110 Shape_1016" [id=1110, type=Shape]; -"1111 Constant_1017" [id=1111, type=Constant]; -"1112 Gather_1018" [id=1112, type=Gather]; -"1113 Unsqueeze_1019" [id=1113, type=Unsqueeze]; -"1114 Concat_1020" [id=1114, type=Concat]; -"1115 Reshape_1021" [id=1115, type=Reshape]; -"1116 QuantizeLinear_h.4.attn.c_proj.weight_1" [id=1116, type=QuantizeLinear]; -"1117 DequantizeLinear_h.4.attn.c_proj.weight_1" [id=1117, type=DequantizeLinear]; -"1118 Gemm_1022" [id=1118, type=Gemm]; -"1119 Unsqueeze_1023" [id=1119, type=Unsqueeze]; -"1120 Unsqueeze_1024" [id=1120, 
type=Unsqueeze]; -"1121 Concat_1025" [id=1121, type=Concat]; -"1122 Reshape_1026" [id=1122, type=Reshape]; -"1123 Add_1027" [id=1123, type=Add]; -"1124 ReduceMean_1028" [id=1124, type=ReduceMean]; -"1125 Sub_1029" [id=1125, type=Sub]; -"1126 Constant_1030" [id=1126, type=Constant]; -"1127 Pow_1031" [id=1127, type=Pow]; -"1128 ReduceMean_1032" [id=1128, type=ReduceMean]; -"1129 Constant_1033" [id=1129, type=Constant]; -"1130 Add_1034" [id=1130, type=Add]; -"1131 Sqrt_1035" [id=1131, type=Sqrt]; -"1132 Div_1036" [id=1132, type=Div]; -"1133 Mul_1037" [id=1133, type=Mul]; -"1134 Add_1038" [id=1134, type=Add]; -"1135 QuantizeLinear_1349_1" [id=1135, type=QuantizeLinear]; -"1136 DequantizeLinear_1349_1" [id=1136, type=DequantizeLinear]; -"1137 Shape_1039" [id=1137, type=Shape]; -"1138 Constant_1040" [id=1138, type=Constant]; -"1139 Gather_1041" [id=1139, type=Gather]; -"1140 Shape_1042" [id=1140, type=Shape]; -"1141 Constant_1043" [id=1141, type=Constant]; -"1142 Gather_1044" [id=1142, type=Gather]; -"1143 Shape_1045" [id=1143, type=Shape]; -"1144 Constant_1046" [id=1144, type=Constant]; -"1145 Gather_1047" [id=1145, type=Gather]; -"1146 Unsqueeze_1048" [id=1146, type=Unsqueeze]; -"1147 Concat_1049" [id=1147, type=Concat]; -"1148 Reshape_1050" [id=1148, type=Reshape]; -"1149 QuantizeLinear_h.4.mlp.c_fc.weight_1" [id=1149, type=QuantizeLinear]; -"1150 DequantizeLinear_h.4.mlp.c_fc.weight_1" [id=1150, type=DequantizeLinear]; -"1151 Gemm_1051" [id=1151, type=Gemm]; -"1152 Unsqueeze_1052" [id=1152, type=Unsqueeze]; -"1153 Unsqueeze_1053" [id=1153, type=Unsqueeze]; -"1154 Concat_1054" [id=1154, type=Concat]; -"1155 Reshape_1055" [id=1155, type=Reshape]; -"1156 Constant_1056" [id=1156, type=Constant]; -"1157 Mul_1057" [id=1157, type=Mul]; -"1158 Constant_1058" [id=1158, type=Constant]; -"1159 Pow_1059" [id=1159, type=Pow]; -"1160 Constant_1060" [id=1160, type=Constant]; -"1161 Mul_1061" [id=1161, type=Mul]; -"1162 Add_1062" [id=1162, type=Add]; -"1163 Constant_1063" [id=1163, type=Constant]; -"1164 Mul_1064" [id=1164, type=Mul]; -"1165 Tanh_1065" [id=1165, type=Tanh]; -"1166 Constant_1066" [id=1166, type=Constant]; -"1167 Add_1067" [id=1167, type=Add]; -"1168 Mul_1068" [id=1168, type=Mul]; -"1169 QuantizeLinear_1383_1" [id=1169, type=QuantizeLinear]; -"1170 DequantizeLinear_1383_1" [id=1170, type=DequantizeLinear]; -"1171 Shape_1069" [id=1171, type=Shape]; -"1172 Constant_1070" [id=1172, type=Constant]; -"1173 Gather_1071" [id=1173, type=Gather]; -"1174 Shape_1072" [id=1174, type=Shape]; -"1175 Constant_1073" [id=1175, type=Constant]; -"1176 Gather_1074" [id=1176, type=Gather]; -"1177 Shape_1075" [id=1177, type=Shape]; -"1178 Constant_1076" [id=1178, type=Constant]; -"1179 Gather_1077" [id=1179, type=Gather]; -"1180 Unsqueeze_1078" [id=1180, type=Unsqueeze]; -"1181 Concat_1079" [id=1181, type=Concat]; -"1182 Reshape_1080" [id=1182, type=Reshape]; -"1183 QuantizeLinear_h.4.mlp.c_proj.weight_1" [id=1183, type=QuantizeLinear]; -"1184 DequantizeLinear_h.4.mlp.c_proj.weight_1" [id=1184, type=DequantizeLinear]; -"1185 Gemm_1081" [id=1185, type=Gemm]; -"1186 Unsqueeze_1082" [id=1186, type=Unsqueeze]; -"1187 Unsqueeze_1083" [id=1187, type=Unsqueeze]; -"1188 Concat_1084" [id=1188, type=Concat]; -"1189 Reshape_1085" [id=1189, type=Reshape]; -"1190 Add_1086" [id=1190, type=Add]; -"1191 ReduceMean_1087" [id=1191, type=ReduceMean]; -"1192 Sub_1088" [id=1192, type=Sub]; -"1193 Constant_1089" [id=1193, type=Constant]; -"1194 Pow_1090" [id=1194, type=Pow]; -"1195 ReduceMean_1091" [id=1195, type=ReduceMean]; -"1196 
Constant_1092" [id=1196, type=Constant]; -"1197 Add_1093" [id=1197, type=Add]; -"1198 Sqrt_1094" [id=1198, type=Sqrt]; -"1199 Div_1095" [id=1199, type=Div]; -"1200 Mul_1096" [id=1200, type=Mul]; -"1201 Add_1097" [id=1201, type=Add]; -"1202 QuantizeLinear_1416_1" [id=1202, type=QuantizeLinear]; -"1203 DequantizeLinear_1416_1" [id=1203, type=DequantizeLinear]; -"1204 Shape_1098" [id=1204, type=Shape]; -"1205 Constant_1099" [id=1205, type=Constant]; -"1206 Gather_1100" [id=1206, type=Gather]; -"1207 Shape_1101" [id=1207, type=Shape]; -"1208 Constant_1102" [id=1208, type=Constant]; -"1209 Gather_1103" [id=1209, type=Gather]; -"1210 Shape_1104" [id=1210, type=Shape]; -"1211 Constant_1105" [id=1211, type=Constant]; -"1212 Gather_1106" [id=1212, type=Gather]; -"1213 Unsqueeze_1107" [id=1213, type=Unsqueeze]; -"1214 Concat_1108" [id=1214, type=Concat]; -"1215 Reshape_1109" [id=1215, type=Reshape]; -"1216 QuantizeLinear_h.5.attn.c_attn.weight_1" [id=1216, type=QuantizeLinear]; -"1217 DequantizeLinear_h.5.attn.c_attn.weight_1" [id=1217, type=DequantizeLinear]; -"1218 Gemm_1110" [id=1218, type=Gemm]; -"1219 Unsqueeze_1111" [id=1219, type=Unsqueeze]; -"1220 Unsqueeze_1112" [id=1220, type=Unsqueeze]; -"1221 Concat_1113" [id=1221, type=Concat]; -"1222 Reshape_1114" [id=1222, type=Reshape]; -"1223 Split_1115" [id=1223, type=Split]; -"1224 QuantizeLinear_query.11_1" [id=1224, type=QuantizeLinear]; -"1225 DequantizeLinear_query.11_1" [id=1225, type=DequantizeLinear]; -"1226 Shape_1116" [id=1226, type=Shape]; -"1227 Constant_1117" [id=1227, type=Constant]; -"1228 Gather_1118" [id=1228, type=Gather]; -"1229 Shape_1119" [id=1229, type=Shape]; -"1230 Constant_1120" [id=1230, type=Constant]; -"1231 Gather_1121" [id=1231, type=Gather]; -"1232 Shape_1122" [id=1232, type=Shape]; -"1233 Constant_1123" [id=1233, type=Constant]; -"1234 Gather_1124" [id=1234, type=Gather]; -"1235 Constant_1125" [id=1235, type=Constant]; -"1236 Div_1126" [id=1236, type=Div]; -"1237 Cast_1127" [id=1237, type=Cast]; -"1238 Cast_1128" [id=1238, type=Cast]; -"1239 Unsqueeze_1129" [id=1239, type=Unsqueeze]; -"1240 Unsqueeze_1130" [id=1240, type=Unsqueeze]; -"1241 Unsqueeze_1131" [id=1241, type=Unsqueeze]; -"1242 Concat_1132" [id=1242, type=Concat]; -"1243 Reshape_1133" [id=1243, type=Reshape]; -"1244 Transpose_1134" [id=1244, type=Transpose]; -"1245 Shape_1135" [id=1245, type=Shape]; -"1246 Constant_1136" [id=1246, type=Constant]; -"1247 Gather_1137" [id=1247, type=Gather]; -"1248 Shape_1138" [id=1248, type=Shape]; -"1249 Constant_1139" [id=1249, type=Constant]; -"1250 Gather_1140" [id=1250, type=Gather]; -"1251 Shape_1141" [id=1251, type=Shape]; -"1252 Constant_1142" [id=1252, type=Constant]; -"1253 Gather_1143" [id=1253, type=Gather]; -"1254 Constant_1144" [id=1254, type=Constant]; -"1255 Div_1145" [id=1255, type=Div]; -"1256 Cast_1146" [id=1256, type=Cast]; -"1257 Cast_1147" [id=1257, type=Cast]; -"1258 Unsqueeze_1148" [id=1258, type=Unsqueeze]; -"1259 Unsqueeze_1149" [id=1259, type=Unsqueeze]; -"1260 Unsqueeze_1150" [id=1260, type=Unsqueeze]; -"1261 Concat_1151" [id=1261, type=Concat]; -"1262 Reshape_1152" [id=1262, type=Reshape]; -"1263 QuantizeLinear_1481_1" [id=1263, type=QuantizeLinear]; -"1264 DequantizeLinear_1481_1" [id=1264, type=DequantizeLinear]; -"1265 Transpose_1153" [id=1265, type=Transpose]; -"1266 Shape_1154" [id=1266, type=Shape]; -"1267 Constant_1155" [id=1267, type=Constant]; -"1268 Gather_1156" [id=1268, type=Gather]; -"1269 Shape_1157" [id=1269, type=Shape]; -"1270 Constant_1158" [id=1270, type=Constant]; -"1271 
Gather_1159" [id=1271, type=Gather]; -"1272 Shape_1160" [id=1272, type=Shape]; -"1273 Constant_1161" [id=1273, type=Constant]; -"1274 Gather_1162" [id=1274, type=Gather]; -"1275 Constant_1163" [id=1275, type=Constant]; -"1276 Div_1164" [id=1276, type=Div]; -"1277 Cast_1165" [id=1277, type=Cast]; -"1278 Cast_1166" [id=1278, type=Cast]; -"1279 Unsqueeze_1167" [id=1279, type=Unsqueeze]; -"1280 Unsqueeze_1168" [id=1280, type=Unsqueeze]; -"1281 Unsqueeze_1169" [id=1281, type=Unsqueeze]; -"1282 Concat_1170" [id=1282, type=Concat]; -"1283 Reshape_1171" [id=1283, type=Reshape]; -"1284 Transpose_1172" [id=1284, type=Transpose]; -"1285 Transpose_1173" [id=1285, type=Transpose]; -"1286 Unsqueeze_1174" [id=1286, type=Unsqueeze]; -"1287 Unsqueeze_1175" [id=1287, type=Unsqueeze]; -"1288 Concat_1176" [id=1288, type=Concat]; -"1289 MatMul_1177" [id=1289, type=MatMul]; -"1290 Constant_1178" [id=1290, type=Constant]; -"1291 Div_1179" [id=1291, type=Div]; -"1292 Shape_1180" [id=1292, type=Shape]; -"1293 Constant_1181" [id=1293, type=Constant]; -"1294 Gather_1182" [id=1294, type=Gather]; -"1295 Shape_1183" [id=1295, type=Shape]; -"1296 Constant_1184" [id=1296, type=Constant]; -"1297 Gather_1185" [id=1297, type=Gather]; -"1298 Sub_1186" [id=1298, type=Sub]; -"1299 Unsqueeze_1187" [id=1299, type=Unsqueeze]; -"1300 Unsqueeze_1188" [id=1300, type=Unsqueeze]; -"1301 Constant_1189" [id=1301, type=Constant]; -"1302 Slice_1190" [id=1302, type=Slice]; -"1303 Unsqueeze_1191" [id=1303, type=Unsqueeze]; -"1304 Constant_1192" [id=1304, type=Constant]; -"1305 Slice_1193" [id=1305, type=Slice]; -"1306 Mul_1194" [id=1306, type=Mul]; -"1307 Constant_1195" [id=1307, type=Constant]; -"1308 Sub_1196" [id=1308, type=Sub]; -"1309 Constant_1197" [id=1309, type=Constant]; -"1310 Mul_1198" [id=1310, type=Mul]; -"1311 Sub_1199" [id=1311, type=Sub]; -"1312 Softmax_1200" [id=1312, type=Softmax]; -"1313 MatMul_1201" [id=1313, type=MatMul]; -"1314 QuantizeLinear_1538_1" [id=1314, type=QuantizeLinear]; -"1315 DequantizeLinear_1538_1" [id=1315, type=DequantizeLinear]; -"1316 Transpose_1202" [id=1316, type=Transpose]; -"1317 Shape_1203" [id=1317, type=Shape]; -"1318 Constant_1204" [id=1318, type=Constant]; -"1319 Gather_1205" [id=1319, type=Gather]; -"1320 Shape_1206" [id=1320, type=Shape]; -"1321 Constant_1207" [id=1321, type=Constant]; -"1322 Gather_1208" [id=1322, type=Gather]; -"1323 Shape_1209" [id=1323, type=Shape]; -"1324 Constant_1210" [id=1324, type=Constant]; -"1325 Gather_1211" [id=1325, type=Gather]; -"1326 Shape_1212" [id=1326, type=Shape]; -"1327 Constant_1213" [id=1327, type=Constant]; -"1328 Gather_1214" [id=1328, type=Gather]; -"1329 Mul_1215" [id=1329, type=Mul]; -"1330 Unsqueeze_1216" [id=1330, type=Unsqueeze]; -"1331 Unsqueeze_1217" [id=1331, type=Unsqueeze]; -"1332 Unsqueeze_1218" [id=1332, type=Unsqueeze]; -"1333 Concat_1219" [id=1333, type=Concat]; -"1334 Reshape_1220" [id=1334, type=Reshape]; -"1335 Shape_1221" [id=1335, type=Shape]; -"1336 Constant_1222" [id=1336, type=Constant]; -"1337 Gather_1223" [id=1337, type=Gather]; -"1338 Shape_1224" [id=1338, type=Shape]; -"1339 Constant_1225" [id=1339, type=Constant]; -"1340 Gather_1226" [id=1340, type=Gather]; -"1341 Shape_1227" [id=1341, type=Shape]; -"1342 Constant_1228" [id=1342, type=Constant]; -"1343 Gather_1229" [id=1343, type=Gather]; -"1344 Unsqueeze_1230" [id=1344, type=Unsqueeze]; -"1345 Concat_1231" [id=1345, type=Concat]; -"1346 Reshape_1232" [id=1346, type=Reshape]; -"1347 QuantizeLinear_h.5.attn.c_proj.weight_1" [id=1347, type=QuantizeLinear]; -"1348 
DequantizeLinear_h.5.attn.c_proj.weight_1" [id=1348, type=DequantizeLinear]; -"1349 Gemm_1233" [id=1349, type=Gemm]; -"1350 Unsqueeze_1234" [id=1350, type=Unsqueeze]; -"1351 Unsqueeze_1235" [id=1351, type=Unsqueeze]; -"1352 Concat_1236" [id=1352, type=Concat]; -"1353 Reshape_1237" [id=1353, type=Reshape]; -"1354 Add_1238" [id=1354, type=Add]; -"1355 ReduceMean_1239" [id=1355, type=ReduceMean]; -"1356 Sub_1240" [id=1356, type=Sub]; -"1357 Constant_1241" [id=1357, type=Constant]; -"1358 Pow_1242" [id=1358, type=Pow]; -"1359 ReduceMean_1243" [id=1359, type=ReduceMean]; -"1360 Constant_1244" [id=1360, type=Constant]; -"1361 Add_1245" [id=1361, type=Add]; -"1362 Sqrt_1246" [id=1362, type=Sqrt]; -"1363 Div_1247" [id=1363, type=Div]; -"1364 Mul_1248" [id=1364, type=Mul]; -"1365 Add_1249" [id=1365, type=Add]; -"1366 QuantizeLinear_1590_1" [id=1366, type=QuantizeLinear]; -"1367 DequantizeLinear_1590_1" [id=1367, type=DequantizeLinear]; -"1368 Shape_1250" [id=1368, type=Shape]; -"1369 Constant_1251" [id=1369, type=Constant]; -"1370 Gather_1252" [id=1370, type=Gather]; -"1371 Shape_1253" [id=1371, type=Shape]; -"1372 Constant_1254" [id=1372, type=Constant]; -"1373 Gather_1255" [id=1373, type=Gather]; -"1374 Shape_1256" [id=1374, type=Shape]; -"1375 Constant_1257" [id=1375, type=Constant]; -"1376 Gather_1258" [id=1376, type=Gather]; -"1377 Unsqueeze_1259" [id=1377, type=Unsqueeze]; -"1378 Concat_1260" [id=1378, type=Concat]; -"1379 Reshape_1261" [id=1379, type=Reshape]; -"1380 QuantizeLinear_h.5.mlp.c_fc.weight_1" [id=1380, type=QuantizeLinear]; -"1381 DequantizeLinear_h.5.mlp.c_fc.weight_1" [id=1381, type=DequantizeLinear]; -"1382 Gemm_1262" [id=1382, type=Gemm]; -"1383 Unsqueeze_1263" [id=1383, type=Unsqueeze]; -"1384 Unsqueeze_1264" [id=1384, type=Unsqueeze]; -"1385 Concat_1265" [id=1385, type=Concat]; -"1386 Reshape_1266" [id=1386, type=Reshape]; -"1387 Constant_1267" [id=1387, type=Constant]; -"1388 Mul_1268" [id=1388, type=Mul]; -"1389 Constant_1269" [id=1389, type=Constant]; -"1390 Pow_1270" [id=1390, type=Pow]; -"1391 Constant_1271" [id=1391, type=Constant]; -"1392 Mul_1272" [id=1392, type=Mul]; -"1393 Add_1273" [id=1393, type=Add]; -"1394 Constant_1274" [id=1394, type=Constant]; -"1395 Mul_1275" [id=1395, type=Mul]; -"1396 Tanh_1276" [id=1396, type=Tanh]; -"1397 Constant_1277" [id=1397, type=Constant]; -"1398 Add_1278" [id=1398, type=Add]; -"1399 Mul_1279" [id=1399, type=Mul]; -"1400 QuantizeLinear_1624_1" [id=1400, type=QuantizeLinear]; -"1401 DequantizeLinear_1624_1" [id=1401, type=DequantizeLinear]; -"1402 Shape_1280" [id=1402, type=Shape]; -"1403 Constant_1281" [id=1403, type=Constant]; -"1404 Gather_1282" [id=1404, type=Gather]; -"1405 Shape_1283" [id=1405, type=Shape]; -"1406 Constant_1284" [id=1406, type=Constant]; -"1407 Gather_1285" [id=1407, type=Gather]; -"1408 Shape_1286" [id=1408, type=Shape]; -"1409 Constant_1287" [id=1409, type=Constant]; -"1410 Gather_1288" [id=1410, type=Gather]; -"1411 Unsqueeze_1289" [id=1411, type=Unsqueeze]; -"1412 Concat_1290" [id=1412, type=Concat]; -"1413 Reshape_1291" [id=1413, type=Reshape]; -"1414 QuantizeLinear_h.5.mlp.c_proj.weight_1" [id=1414, type=QuantizeLinear]; -"1415 DequantizeLinear_h.5.mlp.c_proj.weight_1" [id=1415, type=DequantizeLinear]; -"1416 Gemm_1292" [id=1416, type=Gemm]; -"1417 Unsqueeze_1293" [id=1417, type=Unsqueeze]; -"1418 Unsqueeze_1294" [id=1418, type=Unsqueeze]; -"1419 Concat_1295" [id=1419, type=Concat]; -"1420 Reshape_1296" [id=1420, type=Reshape]; -"1421 Add_1297" [id=1421, type=Add]; -"1422 ReduceMean_1298" [id=1422, 
type=ReduceMean]; -"1423 Sub_1299" [id=1423, type=Sub]; -"1424 Constant_1300" [id=1424, type=Constant]; -"1425 Pow_1301" [id=1425, type=Pow]; -"1426 ReduceMean_1302" [id=1426, type=ReduceMean]; -"1427 Constant_1303" [id=1427, type=Constant]; -"1428 Add_1304" [id=1428, type=Add]; -"1429 Sqrt_1305" [id=1429, type=Sqrt]; -"1430 Div_1306" [id=1430, type=Div]; -"1431 Mul_1307" [id=1431, type=Mul]; -"1432 Add_1308" [id=1432, type=Add]; -"1433 QuantizeLinear_1657_1" [id=1433, type=QuantizeLinear]; -"1434 DequantizeLinear_1657_1" [id=1434, type=DequantizeLinear]; -"1435 Shape_1309" [id=1435, type=Shape]; -"1436 Constant_1310" [id=1436, type=Constant]; -"1437 Gather_1311" [id=1437, type=Gather]; -"1438 Shape_1312" [id=1438, type=Shape]; -"1439 Constant_1313" [id=1439, type=Constant]; -"1440 Gather_1314" [id=1440, type=Gather]; -"1441 Shape_1315" [id=1441, type=Shape]; -"1442 Constant_1316" [id=1442, type=Constant]; -"1443 Gather_1317" [id=1443, type=Gather]; -"1444 Unsqueeze_1318" [id=1444, type=Unsqueeze]; -"1445 Concat_1319" [id=1445, type=Concat]; -"1446 Reshape_1320" [id=1446, type=Reshape]; -"1447 QuantizeLinear_h.6.attn.c_attn.weight_1" [id=1447, type=QuantizeLinear]; -"1448 DequantizeLinear_h.6.attn.c_attn.weight_1" [id=1448, type=DequantizeLinear]; -"1449 Gemm_1321" [id=1449, type=Gemm]; -"1450 Unsqueeze_1322" [id=1450, type=Unsqueeze]; -"1451 Unsqueeze_1323" [id=1451, type=Unsqueeze]; -"1452 Concat_1324" [id=1452, type=Concat]; -"1453 Reshape_1325" [id=1453, type=Reshape]; -"1454 Split_1326" [id=1454, type=Split]; -"1455 QuantizeLinear_query.13_1" [id=1455, type=QuantizeLinear]; -"1456 DequantizeLinear_query.13_1" [id=1456, type=DequantizeLinear]; -"1457 Shape_1327" [id=1457, type=Shape]; -"1458 Constant_1328" [id=1458, type=Constant]; -"1459 Gather_1329" [id=1459, type=Gather]; -"1460 Shape_1330" [id=1460, type=Shape]; -"1461 Constant_1331" [id=1461, type=Constant]; -"1462 Gather_1332" [id=1462, type=Gather]; -"1463 Shape_1333" [id=1463, type=Shape]; -"1464 Constant_1334" [id=1464, type=Constant]; -"1465 Gather_1335" [id=1465, type=Gather]; -"1466 Constant_1336" [id=1466, type=Constant]; -"1467 Div_1337" [id=1467, type=Div]; -"1468 Cast_1338" [id=1468, type=Cast]; -"1469 Cast_1339" [id=1469, type=Cast]; -"1470 Unsqueeze_1340" [id=1470, type=Unsqueeze]; -"1471 Unsqueeze_1341" [id=1471, type=Unsqueeze]; -"1472 Unsqueeze_1342" [id=1472, type=Unsqueeze]; -"1473 Concat_1343" [id=1473, type=Concat]; -"1474 Reshape_1344" [id=1474, type=Reshape]; -"1475 Transpose_1345" [id=1475, type=Transpose]; -"1476 Shape_1346" [id=1476, type=Shape]; -"1477 Constant_1347" [id=1477, type=Constant]; -"1478 Gather_1348" [id=1478, type=Gather]; -"1479 Shape_1349" [id=1479, type=Shape]; -"1480 Constant_1350" [id=1480, type=Constant]; -"1481 Gather_1351" [id=1481, type=Gather]; -"1482 Shape_1352" [id=1482, type=Shape]; -"1483 Constant_1353" [id=1483, type=Constant]; -"1484 Gather_1354" [id=1484, type=Gather]; -"1485 Constant_1355" [id=1485, type=Constant]; -"1486 Div_1356" [id=1486, type=Div]; -"1487 Cast_1357" [id=1487, type=Cast]; -"1488 Cast_1358" [id=1488, type=Cast]; -"1489 Unsqueeze_1359" [id=1489, type=Unsqueeze]; -"1490 Unsqueeze_1360" [id=1490, type=Unsqueeze]; -"1491 Unsqueeze_1361" [id=1491, type=Unsqueeze]; -"1492 Concat_1362" [id=1492, type=Concat]; -"1493 Reshape_1363" [id=1493, type=Reshape]; -"1494 QuantizeLinear_1722_1" [id=1494, type=QuantizeLinear]; -"1495 DequantizeLinear_1722_1" [id=1495, type=DequantizeLinear]; -"1496 Transpose_1364" [id=1496, type=Transpose]; -"1497 Shape_1365" [id=1497, 
type=Shape]; -"1498 Constant_1366" [id=1498, type=Constant]; -"1499 Gather_1367" [id=1499, type=Gather]; -"1500 Shape_1368" [id=1500, type=Shape]; -"1501 Constant_1369" [id=1501, type=Constant]; -"1502 Gather_1370" [id=1502, type=Gather]; -"1503 Shape_1371" [id=1503, type=Shape]; -"1504 Constant_1372" [id=1504, type=Constant]; -"1505 Gather_1373" [id=1505, type=Gather]; -"1506 Constant_1374" [id=1506, type=Constant]; -"1507 Div_1375" [id=1507, type=Div]; -"1508 Cast_1376" [id=1508, type=Cast]; -"1509 Cast_1377" [id=1509, type=Cast]; -"1510 Unsqueeze_1378" [id=1510, type=Unsqueeze]; -"1511 Unsqueeze_1379" [id=1511, type=Unsqueeze]; -"1512 Unsqueeze_1380" [id=1512, type=Unsqueeze]; -"1513 Concat_1381" [id=1513, type=Concat]; -"1514 Reshape_1382" [id=1514, type=Reshape]; -"1515 Transpose_1383" [id=1515, type=Transpose]; -"1516 Transpose_1384" [id=1516, type=Transpose]; -"1517 Unsqueeze_1385" [id=1517, type=Unsqueeze]; -"1518 Unsqueeze_1386" [id=1518, type=Unsqueeze]; -"1519 Concat_1387" [id=1519, type=Concat]; -"1520 MatMul_1388" [id=1520, type=MatMul]; -"1521 Constant_1389" [id=1521, type=Constant]; -"1522 Div_1390" [id=1522, type=Div]; -"1523 Shape_1391" [id=1523, type=Shape]; -"1524 Constant_1392" [id=1524, type=Constant]; -"1525 Gather_1393" [id=1525, type=Gather]; -"1526 Shape_1394" [id=1526, type=Shape]; -"1527 Constant_1395" [id=1527, type=Constant]; -"1528 Gather_1396" [id=1528, type=Gather]; -"1529 Sub_1397" [id=1529, type=Sub]; -"1530 Unsqueeze_1398" [id=1530, type=Unsqueeze]; -"1531 Unsqueeze_1399" [id=1531, type=Unsqueeze]; -"1532 Constant_1400" [id=1532, type=Constant]; -"1533 Slice_1401" [id=1533, type=Slice]; -"1534 Unsqueeze_1402" [id=1534, type=Unsqueeze]; -"1535 Constant_1403" [id=1535, type=Constant]; -"1536 Slice_1404" [id=1536, type=Slice]; -"1537 Mul_1405" [id=1537, type=Mul]; -"1538 Constant_1406" [id=1538, type=Constant]; -"1539 Sub_1407" [id=1539, type=Sub]; -"1540 Constant_1408" [id=1540, type=Constant]; -"1541 Mul_1409" [id=1541, type=Mul]; -"1542 Sub_1410" [id=1542, type=Sub]; -"1543 Softmax_1411" [id=1543, type=Softmax]; -"1544 MatMul_1412" [id=1544, type=MatMul]; -"1545 QuantizeLinear_1779_1" [id=1545, type=QuantizeLinear]; -"1546 DequantizeLinear_1779_1" [id=1546, type=DequantizeLinear]; -"1547 Transpose_1413" [id=1547, type=Transpose]; -"1548 Shape_1414" [id=1548, type=Shape]; -"1549 Constant_1415" [id=1549, type=Constant]; -"1550 Gather_1416" [id=1550, type=Gather]; -"1551 Shape_1417" [id=1551, type=Shape]; -"1552 Constant_1418" [id=1552, type=Constant]; -"1553 Gather_1419" [id=1553, type=Gather]; -"1554 Shape_1420" [id=1554, type=Shape]; -"1555 Constant_1421" [id=1555, type=Constant]; -"1556 Gather_1422" [id=1556, type=Gather]; -"1557 Shape_1423" [id=1557, type=Shape]; -"1558 Constant_1424" [id=1558, type=Constant]; -"1559 Gather_1425" [id=1559, type=Gather]; -"1560 Mul_1426" [id=1560, type=Mul]; -"1561 Unsqueeze_1427" [id=1561, type=Unsqueeze]; -"1562 Unsqueeze_1428" [id=1562, type=Unsqueeze]; -"1563 Unsqueeze_1429" [id=1563, type=Unsqueeze]; -"1564 Concat_1430" [id=1564, type=Concat]; -"1565 Reshape_1431" [id=1565, type=Reshape]; -"1566 Shape_1432" [id=1566, type=Shape]; -"1567 Constant_1433" [id=1567, type=Constant]; -"1568 Gather_1434" [id=1568, type=Gather]; -"1569 Shape_1435" [id=1569, type=Shape]; -"1570 Constant_1436" [id=1570, type=Constant]; -"1571 Gather_1437" [id=1571, type=Gather]; -"1572 Shape_1438" [id=1572, type=Shape]; -"1573 Constant_1439" [id=1573, type=Constant]; -"1574 Gather_1440" [id=1574, type=Gather]; -"1575 Unsqueeze_1441" [id=1575, 
type=Unsqueeze]; -"1576 Concat_1442" [id=1576, type=Concat]; -"1577 Reshape_1443" [id=1577, type=Reshape]; -"1578 QuantizeLinear_h.6.attn.c_proj.weight_1" [id=1578, type=QuantizeLinear]; -"1579 DequantizeLinear_h.6.attn.c_proj.weight_1" [id=1579, type=DequantizeLinear]; -"1580 Gemm_1444" [id=1580, type=Gemm]; -"1581 Unsqueeze_1445" [id=1581, type=Unsqueeze]; -"1582 Unsqueeze_1446" [id=1582, type=Unsqueeze]; -"1583 Concat_1447" [id=1583, type=Concat]; -"1584 Reshape_1448" [id=1584, type=Reshape]; -"1585 Add_1449" [id=1585, type=Add]; -"1586 ReduceMean_1450" [id=1586, type=ReduceMean]; -"1587 Sub_1451" [id=1587, type=Sub]; -"1588 Constant_1452" [id=1588, type=Constant]; -"1589 Pow_1453" [id=1589, type=Pow]; -"1590 ReduceMean_1454" [id=1590, type=ReduceMean]; -"1591 Constant_1455" [id=1591, type=Constant]; -"1592 Add_1456" [id=1592, type=Add]; -"1593 Sqrt_1457" [id=1593, type=Sqrt]; -"1594 Div_1458" [id=1594, type=Div]; -"1595 Mul_1459" [id=1595, type=Mul]; -"1596 Add_1460" [id=1596, type=Add]; -"1597 QuantizeLinear_1831_1" [id=1597, type=QuantizeLinear]; -"1598 DequantizeLinear_1831_1" [id=1598, type=DequantizeLinear]; -"1599 Shape_1461" [id=1599, type=Shape]; -"1600 Constant_1462" [id=1600, type=Constant]; -"1601 Gather_1463" [id=1601, type=Gather]; -"1602 Shape_1464" [id=1602, type=Shape]; -"1603 Constant_1465" [id=1603, type=Constant]; -"1604 Gather_1466" [id=1604, type=Gather]; -"1605 Shape_1467" [id=1605, type=Shape]; -"1606 Constant_1468" [id=1606, type=Constant]; -"1607 Gather_1469" [id=1607, type=Gather]; -"1608 Unsqueeze_1470" [id=1608, type=Unsqueeze]; -"1609 Concat_1471" [id=1609, type=Concat]; -"1610 Reshape_1472" [id=1610, type=Reshape]; -"1611 QuantizeLinear_h.6.mlp.c_fc.weight_1" [id=1611, type=QuantizeLinear]; -"1612 DequantizeLinear_h.6.mlp.c_fc.weight_1" [id=1612, type=DequantizeLinear]; -"1613 Gemm_1473" [id=1613, type=Gemm]; -"1614 Unsqueeze_1474" [id=1614, type=Unsqueeze]; -"1615 Unsqueeze_1475" [id=1615, type=Unsqueeze]; -"1616 Concat_1476" [id=1616, type=Concat]; -"1617 Reshape_1477" [id=1617, type=Reshape]; -"1618 Constant_1478" [id=1618, type=Constant]; -"1619 Mul_1479" [id=1619, type=Mul]; -"1620 Constant_1480" [id=1620, type=Constant]; -"1621 Pow_1481" [id=1621, type=Pow]; -"1622 Constant_1482" [id=1622, type=Constant]; -"1623 Mul_1483" [id=1623, type=Mul]; -"1624 Add_1484" [id=1624, type=Add]; -"1625 Constant_1485" [id=1625, type=Constant]; -"1626 Mul_1486" [id=1626, type=Mul]; -"1627 Tanh_1487" [id=1627, type=Tanh]; -"1628 Constant_1488" [id=1628, type=Constant]; -"1629 Add_1489" [id=1629, type=Add]; -"1630 Mul_1490" [id=1630, type=Mul]; -"1631 QuantizeLinear_1865_1" [id=1631, type=QuantizeLinear]; -"1632 DequantizeLinear_1865_1" [id=1632, type=DequantizeLinear]; -"1633 Shape_1491" [id=1633, type=Shape]; -"1634 Constant_1492" [id=1634, type=Constant]; -"1635 Gather_1493" [id=1635, type=Gather]; -"1636 Shape_1494" [id=1636, type=Shape]; -"1637 Constant_1495" [id=1637, type=Constant]; -"1638 Gather_1496" [id=1638, type=Gather]; -"1639 Shape_1497" [id=1639, type=Shape]; -"1640 Constant_1498" [id=1640, type=Constant]; -"1641 Gather_1499" [id=1641, type=Gather]; -"1642 Unsqueeze_1500" [id=1642, type=Unsqueeze]; -"1643 Concat_1501" [id=1643, type=Concat]; -"1644 Reshape_1502" [id=1644, type=Reshape]; -"1645 QuantizeLinear_h.6.mlp.c_proj.weight_1" [id=1645, type=QuantizeLinear]; -"1646 DequantizeLinear_h.6.mlp.c_proj.weight_1" [id=1646, type=DequantizeLinear]; -"1647 Gemm_1503" [id=1647, type=Gemm]; -"1648 Unsqueeze_1504" [id=1648, type=Unsqueeze]; -"1649 
Unsqueeze_1505" [id=1649, type=Unsqueeze]; -"1650 Concat_1506" [id=1650, type=Concat]; -"1651 Reshape_1507" [id=1651, type=Reshape]; -"1652 Add_1508" [id=1652, type=Add]; -"1653 ReduceMean_1509" [id=1653, type=ReduceMean]; -"1654 Sub_1510" [id=1654, type=Sub]; -"1655 Constant_1511" [id=1655, type=Constant]; -"1656 Pow_1512" [id=1656, type=Pow]; -"1657 ReduceMean_1513" [id=1657, type=ReduceMean]; -"1658 Constant_1514" [id=1658, type=Constant]; -"1659 Add_1515" [id=1659, type=Add]; -"1660 Sqrt_1516" [id=1660, type=Sqrt]; -"1661 Div_1517" [id=1661, type=Div]; -"1662 Mul_1518" [id=1662, type=Mul]; -"1663 Add_1519" [id=1663, type=Add]; -"1664 QuantizeLinear_1898_1" [id=1664, type=QuantizeLinear]; -"1665 DequantizeLinear_1898_1" [id=1665, type=DequantizeLinear]; -"1666 Shape_1520" [id=1666, type=Shape]; -"1667 Constant_1521" [id=1667, type=Constant]; -"1668 Gather_1522" [id=1668, type=Gather]; -"1669 Shape_1523" [id=1669, type=Shape]; -"1670 Constant_1524" [id=1670, type=Constant]; -"1671 Gather_1525" [id=1671, type=Gather]; -"1672 Shape_1526" [id=1672, type=Shape]; -"1673 Constant_1527" [id=1673, type=Constant]; -"1674 Gather_1528" [id=1674, type=Gather]; -"1675 Unsqueeze_1529" [id=1675, type=Unsqueeze]; -"1676 Concat_1530" [id=1676, type=Concat]; -"1677 Reshape_1531" [id=1677, type=Reshape]; -"1678 QuantizeLinear_h.7.attn.c_attn.weight_1" [id=1678, type=QuantizeLinear]; -"1679 DequantizeLinear_h.7.attn.c_attn.weight_1" [id=1679, type=DequantizeLinear]; -"1680 Gemm_1532" [id=1680, type=Gemm]; -"1681 Unsqueeze_1533" [id=1681, type=Unsqueeze]; -"1682 Unsqueeze_1534" [id=1682, type=Unsqueeze]; -"1683 Concat_1535" [id=1683, type=Concat]; -"1684 Reshape_1536" [id=1684, type=Reshape]; -"1685 Split_1537" [id=1685, type=Split]; -"1686 QuantizeLinear_query.15_1" [id=1686, type=QuantizeLinear]; -"1687 DequantizeLinear_query.15_1" [id=1687, type=DequantizeLinear]; -"1688 Shape_1538" [id=1688, type=Shape]; -"1689 Constant_1539" [id=1689, type=Constant]; -"1690 Gather_1540" [id=1690, type=Gather]; -"1691 Shape_1541" [id=1691, type=Shape]; -"1692 Constant_1542" [id=1692, type=Constant]; -"1693 Gather_1543" [id=1693, type=Gather]; -"1694 Shape_1544" [id=1694, type=Shape]; -"1695 Constant_1545" [id=1695, type=Constant]; -"1696 Gather_1546" [id=1696, type=Gather]; -"1697 Constant_1547" [id=1697, type=Constant]; -"1698 Div_1548" [id=1698, type=Div]; -"1699 Cast_1549" [id=1699, type=Cast]; -"1700 Cast_1550" [id=1700, type=Cast]; -"1701 Unsqueeze_1551" [id=1701, type=Unsqueeze]; -"1702 Unsqueeze_1552" [id=1702, type=Unsqueeze]; -"1703 Unsqueeze_1553" [id=1703, type=Unsqueeze]; -"1704 Concat_1554" [id=1704, type=Concat]; -"1705 Reshape_1555" [id=1705, type=Reshape]; -"1706 Transpose_1556" [id=1706, type=Transpose]; -"1707 Shape_1557" [id=1707, type=Shape]; -"1708 Constant_1558" [id=1708, type=Constant]; -"1709 Gather_1559" [id=1709, type=Gather]; -"1710 Shape_1560" [id=1710, type=Shape]; -"1711 Constant_1561" [id=1711, type=Constant]; -"1712 Gather_1562" [id=1712, type=Gather]; -"1713 Shape_1563" [id=1713, type=Shape]; -"1714 Constant_1564" [id=1714, type=Constant]; -"1715 Gather_1565" [id=1715, type=Gather]; -"1716 Constant_1566" [id=1716, type=Constant]; -"1717 Div_1567" [id=1717, type=Div]; -"1718 Cast_1568" [id=1718, type=Cast]; -"1719 Cast_1569" [id=1719, type=Cast]; -"1720 Unsqueeze_1570" [id=1720, type=Unsqueeze]; -"1721 Unsqueeze_1571" [id=1721, type=Unsqueeze]; -"1722 Unsqueeze_1572" [id=1722, type=Unsqueeze]; -"1723 Concat_1573" [id=1723, type=Concat]; -"1724 Reshape_1574" [id=1724, type=Reshape]; -"1725 
QuantizeLinear_1963_1" [id=1725, type=QuantizeLinear]; -"1726 DequantizeLinear_1963_1" [id=1726, type=DequantizeLinear]; -"1727 Transpose_1575" [id=1727, type=Transpose]; -"1728 Shape_1576" [id=1728, type=Shape]; -"1729 Constant_1577" [id=1729, type=Constant]; -"1730 Gather_1578" [id=1730, type=Gather]; -"1731 Shape_1579" [id=1731, type=Shape]; -"1732 Constant_1580" [id=1732, type=Constant]; -"1733 Gather_1581" [id=1733, type=Gather]; -"1734 Shape_1582" [id=1734, type=Shape]; -"1735 Constant_1583" [id=1735, type=Constant]; -"1736 Gather_1584" [id=1736, type=Gather]; -"1737 Constant_1585" [id=1737, type=Constant]; -"1738 Div_1586" [id=1738, type=Div]; -"1739 Cast_1587" [id=1739, type=Cast]; -"1740 Cast_1588" [id=1740, type=Cast]; -"1741 Unsqueeze_1589" [id=1741, type=Unsqueeze]; -"1742 Unsqueeze_1590" [id=1742, type=Unsqueeze]; -"1743 Unsqueeze_1591" [id=1743, type=Unsqueeze]; -"1744 Concat_1592" [id=1744, type=Concat]; -"1745 Reshape_1593" [id=1745, type=Reshape]; -"1746 Transpose_1594" [id=1746, type=Transpose]; -"1747 Transpose_1595" [id=1747, type=Transpose]; -"1748 Unsqueeze_1596" [id=1748, type=Unsqueeze]; -"1749 Unsqueeze_1597" [id=1749, type=Unsqueeze]; -"1750 Concat_1598" [id=1750, type=Concat]; -"1751 MatMul_1599" [id=1751, type=MatMul]; -"1752 Constant_1600" [id=1752, type=Constant]; -"1753 Div_1601" [id=1753, type=Div]; -"1754 Shape_1602" [id=1754, type=Shape]; -"1755 Constant_1603" [id=1755, type=Constant]; -"1756 Gather_1604" [id=1756, type=Gather]; -"1757 Shape_1605" [id=1757, type=Shape]; -"1758 Constant_1606" [id=1758, type=Constant]; -"1759 Gather_1607" [id=1759, type=Gather]; -"1760 Sub_1608" [id=1760, type=Sub]; -"1761 Unsqueeze_1609" [id=1761, type=Unsqueeze]; -"1762 Unsqueeze_1610" [id=1762, type=Unsqueeze]; -"1763 Constant_1611" [id=1763, type=Constant]; -"1764 Slice_1612" [id=1764, type=Slice]; -"1765 Unsqueeze_1613" [id=1765, type=Unsqueeze]; -"1766 Constant_1614" [id=1766, type=Constant]; -"1767 Slice_1615" [id=1767, type=Slice]; -"1768 Mul_1616" [id=1768, type=Mul]; -"1769 Constant_1617" [id=1769, type=Constant]; -"1770 Sub_1618" [id=1770, type=Sub]; -"1771 Constant_1619" [id=1771, type=Constant]; -"1772 Mul_1620" [id=1772, type=Mul]; -"1773 Sub_1621" [id=1773, type=Sub]; -"1774 Softmax_1622" [id=1774, type=Softmax]; -"1775 MatMul_1623" [id=1775, type=MatMul]; -"1776 QuantizeLinear_2020_1" [id=1776, type=QuantizeLinear]; -"1777 DequantizeLinear_2020_1" [id=1777, type=DequantizeLinear]; -"1778 Transpose_1624" [id=1778, type=Transpose]; -"1779 Shape_1625" [id=1779, type=Shape]; -"1780 Constant_1626" [id=1780, type=Constant]; -"1781 Gather_1627" [id=1781, type=Gather]; -"1782 Shape_1628" [id=1782, type=Shape]; -"1783 Constant_1629" [id=1783, type=Constant]; -"1784 Gather_1630" [id=1784, type=Gather]; -"1785 Shape_1631" [id=1785, type=Shape]; -"1786 Constant_1632" [id=1786, type=Constant]; -"1787 Gather_1633" [id=1787, type=Gather]; -"1788 Shape_1634" [id=1788, type=Shape]; -"1789 Constant_1635" [id=1789, type=Constant]; -"1790 Gather_1636" [id=1790, type=Gather]; -"1791 Mul_1637" [id=1791, type=Mul]; -"1792 Unsqueeze_1638" [id=1792, type=Unsqueeze]; -"1793 Unsqueeze_1639" [id=1793, type=Unsqueeze]; -"1794 Unsqueeze_1640" [id=1794, type=Unsqueeze]; -"1795 Concat_1641" [id=1795, type=Concat]; -"1796 Reshape_1642" [id=1796, type=Reshape]; -"1797 Shape_1643" [id=1797, type=Shape]; -"1798 Constant_1644" [id=1798, type=Constant]; -"1799 Gather_1645" [id=1799, type=Gather]; -"1800 Shape_1646" [id=1800, type=Shape]; -"1801 Constant_1647" [id=1801, type=Constant]; -"1802 
Gather_1648" [id=1802, type=Gather]; -"1803 Shape_1649" [id=1803, type=Shape]; -"1804 Constant_1650" [id=1804, type=Constant]; -"1805 Gather_1651" [id=1805, type=Gather]; -"1806 Unsqueeze_1652" [id=1806, type=Unsqueeze]; -"1807 Concat_1653" [id=1807, type=Concat]; -"1808 Reshape_1654" [id=1808, type=Reshape]; -"1809 QuantizeLinear_h.7.attn.c_proj.weight_1" [id=1809, type=QuantizeLinear]; -"1810 DequantizeLinear_h.7.attn.c_proj.weight_1" [id=1810, type=DequantizeLinear]; -"1811 Gemm_1655" [id=1811, type=Gemm]; -"1812 Unsqueeze_1656" [id=1812, type=Unsqueeze]; -"1813 Unsqueeze_1657" [id=1813, type=Unsqueeze]; -"1814 Concat_1658" [id=1814, type=Concat]; -"1815 Reshape_1659" [id=1815, type=Reshape]; -"1816 Add_1660" [id=1816, type=Add]; -"1817 ReduceMean_1661" [id=1817, type=ReduceMean]; -"1818 Sub_1662" [id=1818, type=Sub]; -"1819 Constant_1663" [id=1819, type=Constant]; -"1820 Pow_1664" [id=1820, type=Pow]; -"1821 ReduceMean_1665" [id=1821, type=ReduceMean]; -"1822 Constant_1666" [id=1822, type=Constant]; -"1823 Add_1667" [id=1823, type=Add]; -"1824 Sqrt_1668" [id=1824, type=Sqrt]; -"1825 Div_1669" [id=1825, type=Div]; -"1826 Mul_1670" [id=1826, type=Mul]; -"1827 Add_1671" [id=1827, type=Add]; -"1828 QuantizeLinear_2072_1" [id=1828, type=QuantizeLinear]; -"1829 DequantizeLinear_2072_1" [id=1829, type=DequantizeLinear]; -"1830 Shape_1672" [id=1830, type=Shape]; -"1831 Constant_1673" [id=1831, type=Constant]; -"1832 Gather_1674" [id=1832, type=Gather]; -"1833 Shape_1675" [id=1833, type=Shape]; -"1834 Constant_1676" [id=1834, type=Constant]; -"1835 Gather_1677" [id=1835, type=Gather]; -"1836 Shape_1678" [id=1836, type=Shape]; -"1837 Constant_1679" [id=1837, type=Constant]; -"1838 Gather_1680" [id=1838, type=Gather]; -"1839 Unsqueeze_1681" [id=1839, type=Unsqueeze]; -"1840 Concat_1682" [id=1840, type=Concat]; -"1841 Reshape_1683" [id=1841, type=Reshape]; -"1842 QuantizeLinear_h.7.mlp.c_fc.weight_1" [id=1842, type=QuantizeLinear]; -"1843 DequantizeLinear_h.7.mlp.c_fc.weight_1" [id=1843, type=DequantizeLinear]; -"1844 Gemm_1684" [id=1844, type=Gemm]; -"1845 Unsqueeze_1685" [id=1845, type=Unsqueeze]; -"1846 Unsqueeze_1686" [id=1846, type=Unsqueeze]; -"1847 Concat_1687" [id=1847, type=Concat]; -"1848 Reshape_1688" [id=1848, type=Reshape]; -"1849 Constant_1689" [id=1849, type=Constant]; -"1850 Mul_1690" [id=1850, type=Mul]; -"1851 Constant_1691" [id=1851, type=Constant]; -"1852 Pow_1692" [id=1852, type=Pow]; -"1853 Constant_1693" [id=1853, type=Constant]; -"1854 Mul_1694" [id=1854, type=Mul]; -"1855 Add_1695" [id=1855, type=Add]; -"1856 Constant_1696" [id=1856, type=Constant]; -"1857 Mul_1697" [id=1857, type=Mul]; -"1858 Tanh_1698" [id=1858, type=Tanh]; -"1859 Constant_1699" [id=1859, type=Constant]; -"1860 Add_1700" [id=1860, type=Add]; -"1861 Mul_1701" [id=1861, type=Mul]; -"1862 QuantizeLinear_2106_1" [id=1862, type=QuantizeLinear]; -"1863 DequantizeLinear_2106_1" [id=1863, type=DequantizeLinear]; -"1864 Shape_1702" [id=1864, type=Shape]; -"1865 Constant_1703" [id=1865, type=Constant]; -"1866 Gather_1704" [id=1866, type=Gather]; -"1867 Shape_1705" [id=1867, type=Shape]; -"1868 Constant_1706" [id=1868, type=Constant]; -"1869 Gather_1707" [id=1869, type=Gather]; -"1870 Shape_1708" [id=1870, type=Shape]; -"1871 Constant_1709" [id=1871, type=Constant]; -"1872 Gather_1710" [id=1872, type=Gather]; -"1873 Unsqueeze_1711" [id=1873, type=Unsqueeze]; -"1874 Concat_1712" [id=1874, type=Concat]; -"1875 Reshape_1713" [id=1875, type=Reshape]; -"1876 QuantizeLinear_h.7.mlp.c_proj.weight_1" [id=1876, 
type=QuantizeLinear]; -"1877 DequantizeLinear_h.7.mlp.c_proj.weight_1" [id=1877, type=DequantizeLinear]; -"1878 Gemm_1714" [id=1878, type=Gemm]; -"1879 Unsqueeze_1715" [id=1879, type=Unsqueeze]; -"1880 Unsqueeze_1716" [id=1880, type=Unsqueeze]; -"1881 Concat_1717" [id=1881, type=Concat]; -"1882 Reshape_1718" [id=1882, type=Reshape]; -"1883 Add_1719" [id=1883, type=Add]; -"1884 ReduceMean_1720" [id=1884, type=ReduceMean]; -"1885 Sub_1721" [id=1885, type=Sub]; -"1886 Constant_1722" [id=1886, type=Constant]; -"1887 Pow_1723" [id=1887, type=Pow]; -"1888 ReduceMean_1724" [id=1888, type=ReduceMean]; -"1889 Constant_1725" [id=1889, type=Constant]; -"1890 Add_1726" [id=1890, type=Add]; -"1891 Sqrt_1727" [id=1891, type=Sqrt]; -"1892 Div_1728" [id=1892, type=Div]; -"1893 Mul_1729" [id=1893, type=Mul]; -"1894 Add_1730" [id=1894, type=Add]; -"1895 QuantizeLinear_2139_1" [id=1895, type=QuantizeLinear]; -"1896 DequantizeLinear_2139_1" [id=1896, type=DequantizeLinear]; -"1897 Shape_1731" [id=1897, type=Shape]; -"1898 Constant_1732" [id=1898, type=Constant]; -"1899 Gather_1733" [id=1899, type=Gather]; -"1900 Shape_1734" [id=1900, type=Shape]; -"1901 Constant_1735" [id=1901, type=Constant]; -"1902 Gather_1736" [id=1902, type=Gather]; -"1903 Shape_1737" [id=1903, type=Shape]; -"1904 Constant_1738" [id=1904, type=Constant]; -"1905 Gather_1739" [id=1905, type=Gather]; -"1906 Unsqueeze_1740" [id=1906, type=Unsqueeze]; -"1907 Concat_1741" [id=1907, type=Concat]; -"1908 Reshape_1742" [id=1908, type=Reshape]; -"1909 QuantizeLinear_h.8.attn.c_attn.weight_1" [id=1909, type=QuantizeLinear]; -"1910 DequantizeLinear_h.8.attn.c_attn.weight_1" [id=1910, type=DequantizeLinear]; -"1911 Gemm_1743" [id=1911, type=Gemm]; -"1912 Unsqueeze_1744" [id=1912, type=Unsqueeze]; -"1913 Unsqueeze_1745" [id=1913, type=Unsqueeze]; -"1914 Concat_1746" [id=1914, type=Concat]; -"1915 Reshape_1747" [id=1915, type=Reshape]; -"1916 Split_1748" [id=1916, type=Split]; -"1917 QuantizeLinear_query.17_1" [id=1917, type=QuantizeLinear]; -"1918 DequantizeLinear_query.17_1" [id=1918, type=DequantizeLinear]; -"1919 Shape_1749" [id=1919, type=Shape]; -"1920 Constant_1750" [id=1920, type=Constant]; -"1921 Gather_1751" [id=1921, type=Gather]; -"1922 Shape_1752" [id=1922, type=Shape]; -"1923 Constant_1753" [id=1923, type=Constant]; -"1924 Gather_1754" [id=1924, type=Gather]; -"1925 Shape_1755" [id=1925, type=Shape]; -"1926 Constant_1756" [id=1926, type=Constant]; -"1927 Gather_1757" [id=1927, type=Gather]; -"1928 Constant_1758" [id=1928, type=Constant]; -"1929 Div_1759" [id=1929, type=Div]; -"1930 Cast_1760" [id=1930, type=Cast]; -"1931 Cast_1761" [id=1931, type=Cast]; -"1932 Unsqueeze_1762" [id=1932, type=Unsqueeze]; -"1933 Unsqueeze_1763" [id=1933, type=Unsqueeze]; -"1934 Unsqueeze_1764" [id=1934, type=Unsqueeze]; -"1935 Concat_1765" [id=1935, type=Concat]; -"1936 Reshape_1766" [id=1936, type=Reshape]; -"1937 Transpose_1767" [id=1937, type=Transpose]; -"1938 Shape_1768" [id=1938, type=Shape]; -"1939 Constant_1769" [id=1939, type=Constant]; -"1940 Gather_1770" [id=1940, type=Gather]; -"1941 Shape_1771" [id=1941, type=Shape]; -"1942 Constant_1772" [id=1942, type=Constant]; -"1943 Gather_1773" [id=1943, type=Gather]; -"1944 Shape_1774" [id=1944, type=Shape]; -"1945 Constant_1775" [id=1945, type=Constant]; -"1946 Gather_1776" [id=1946, type=Gather]; -"1947 Constant_1777" [id=1947, type=Constant]; -"1948 Div_1778" [id=1948, type=Div]; -"1949 Cast_1779" [id=1949, type=Cast]; -"1950 Cast_1780" [id=1950, type=Cast]; -"1951 Unsqueeze_1781" [id=1951, 
type=Unsqueeze]; -"1952 Unsqueeze_1782" [id=1952, type=Unsqueeze]; -"1953 Unsqueeze_1783" [id=1953, type=Unsqueeze]; -"1954 Concat_1784" [id=1954, type=Concat]; -"1955 Reshape_1785" [id=1955, type=Reshape]; -"1956 QuantizeLinear_2204_1" [id=1956, type=QuantizeLinear]; -"1957 DequantizeLinear_2204_1" [id=1957, type=DequantizeLinear]; -"1958 Transpose_1786" [id=1958, type=Transpose]; -"1959 Shape_1787" [id=1959, type=Shape]; -"1960 Constant_1788" [id=1960, type=Constant]; -"1961 Gather_1789" [id=1961, type=Gather]; -"1962 Shape_1790" [id=1962, type=Shape]; -"1963 Constant_1791" [id=1963, type=Constant]; -"1964 Gather_1792" [id=1964, type=Gather]; -"1965 Shape_1793" [id=1965, type=Shape]; -"1966 Constant_1794" [id=1966, type=Constant]; -"1967 Gather_1795" [id=1967, type=Gather]; -"1968 Constant_1796" [id=1968, type=Constant]; -"1969 Div_1797" [id=1969, type=Div]; -"1970 Cast_1798" [id=1970, type=Cast]; -"1971 Cast_1799" [id=1971, type=Cast]; -"1972 Unsqueeze_1800" [id=1972, type=Unsqueeze]; -"1973 Unsqueeze_1801" [id=1973, type=Unsqueeze]; -"1974 Unsqueeze_1802" [id=1974, type=Unsqueeze]; -"1975 Concat_1803" [id=1975, type=Concat]; -"1976 Reshape_1804" [id=1976, type=Reshape]; -"1977 Transpose_1805" [id=1977, type=Transpose]; -"1978 Transpose_1806" [id=1978, type=Transpose]; -"1979 Unsqueeze_1807" [id=1979, type=Unsqueeze]; -"1980 Unsqueeze_1808" [id=1980, type=Unsqueeze]; -"1981 Concat_1809" [id=1981, type=Concat]; -"1982 MatMul_1810" [id=1982, type=MatMul]; -"1983 Constant_1811" [id=1983, type=Constant]; -"1984 Div_1812" [id=1984, type=Div]; -"1985 Shape_1813" [id=1985, type=Shape]; -"1986 Constant_1814" [id=1986, type=Constant]; -"1987 Gather_1815" [id=1987, type=Gather]; -"1988 Shape_1816" [id=1988, type=Shape]; -"1989 Constant_1817" [id=1989, type=Constant]; -"1990 Gather_1818" [id=1990, type=Gather]; -"1991 Sub_1819" [id=1991, type=Sub]; -"1992 Unsqueeze_1820" [id=1992, type=Unsqueeze]; -"1993 Unsqueeze_1821" [id=1993, type=Unsqueeze]; -"1994 Constant_1822" [id=1994, type=Constant]; -"1995 Slice_1823" [id=1995, type=Slice]; -"1996 Unsqueeze_1824" [id=1996, type=Unsqueeze]; -"1997 Constant_1825" [id=1997, type=Constant]; -"1998 Slice_1826" [id=1998, type=Slice]; -"1999 Mul_1827" [id=1999, type=Mul]; -"2000 Constant_1828" [id=2000, type=Constant]; -"2001 Sub_1829" [id=2001, type=Sub]; -"2002 Constant_1830" [id=2002, type=Constant]; -"2003 Mul_1831" [id=2003, type=Mul]; -"2004 Sub_1832" [id=2004, type=Sub]; -"2005 Softmax_1833" [id=2005, type=Softmax]; -"2006 MatMul_1834" [id=2006, type=MatMul]; -"2007 QuantizeLinear_2261_1" [id=2007, type=QuantizeLinear]; -"2008 DequantizeLinear_2261_1" [id=2008, type=DequantizeLinear]; -"2009 Transpose_1835" [id=2009, type=Transpose]; -"2010 Shape_1836" [id=2010, type=Shape]; -"2011 Constant_1837" [id=2011, type=Constant]; -"2012 Gather_1838" [id=2012, type=Gather]; -"2013 Shape_1839" [id=2013, type=Shape]; -"2014 Constant_1840" [id=2014, type=Constant]; -"2015 Gather_1841" [id=2015, type=Gather]; -"2016 Shape_1842" [id=2016, type=Shape]; -"2017 Constant_1843" [id=2017, type=Constant]; -"2018 Gather_1844" [id=2018, type=Gather]; -"2019 Shape_1845" [id=2019, type=Shape]; -"2020 Constant_1846" [id=2020, type=Constant]; -"2021 Gather_1847" [id=2021, type=Gather]; -"2022 Mul_1848" [id=2022, type=Mul]; -"2023 Unsqueeze_1849" [id=2023, type=Unsqueeze]; -"2024 Unsqueeze_1850" [id=2024, type=Unsqueeze]; -"2025 Unsqueeze_1851" [id=2025, type=Unsqueeze]; -"2026 Concat_1852" [id=2026, type=Concat]; -"2027 Reshape_1853" [id=2027, type=Reshape]; -"2028 Shape_1854" 
[id=2028, type=Shape]; -"2029 Constant_1855" [id=2029, type=Constant]; -"2030 Gather_1856" [id=2030, type=Gather]; -"2031 Shape_1857" [id=2031, type=Shape]; -"2032 Constant_1858" [id=2032, type=Constant]; -"2033 Gather_1859" [id=2033, type=Gather]; -"2034 Shape_1860" [id=2034, type=Shape]; -"2035 Constant_1861" [id=2035, type=Constant]; -"2036 Gather_1862" [id=2036, type=Gather]; -"2037 Unsqueeze_1863" [id=2037, type=Unsqueeze]; -"2038 Concat_1864" [id=2038, type=Concat]; -"2039 Reshape_1865" [id=2039, type=Reshape]; -"2040 QuantizeLinear_h.8.attn.c_proj.weight_1" [id=2040, type=QuantizeLinear]; -"2041 DequantizeLinear_h.8.attn.c_proj.weight_1" [id=2041, type=DequantizeLinear]; -"2042 Gemm_1866" [id=2042, type=Gemm]; -"2043 Unsqueeze_1867" [id=2043, type=Unsqueeze]; -"2044 Unsqueeze_1868" [id=2044, type=Unsqueeze]; -"2045 Concat_1869" [id=2045, type=Concat]; -"2046 Reshape_1870" [id=2046, type=Reshape]; -"2047 Add_1871" [id=2047, type=Add]; -"2048 ReduceMean_1872" [id=2048, type=ReduceMean]; -"2049 Sub_1873" [id=2049, type=Sub]; -"2050 Constant_1874" [id=2050, type=Constant]; -"2051 Pow_1875" [id=2051, type=Pow]; -"2052 ReduceMean_1876" [id=2052, type=ReduceMean]; -"2053 Constant_1877" [id=2053, type=Constant]; -"2054 Add_1878" [id=2054, type=Add]; -"2055 Sqrt_1879" [id=2055, type=Sqrt]; -"2056 Div_1880" [id=2056, type=Div]; -"2057 Mul_1881" [id=2057, type=Mul]; -"2058 Add_1882" [id=2058, type=Add]; -"2059 QuantizeLinear_2313_1" [id=2059, type=QuantizeLinear]; -"2060 DequantizeLinear_2313_1" [id=2060, type=DequantizeLinear]; -"2061 Shape_1883" [id=2061, type=Shape]; -"2062 Constant_1884" [id=2062, type=Constant]; -"2063 Gather_1885" [id=2063, type=Gather]; -"2064 Shape_1886" [id=2064, type=Shape]; -"2065 Constant_1887" [id=2065, type=Constant]; -"2066 Gather_1888" [id=2066, type=Gather]; -"2067 Shape_1889" [id=2067, type=Shape]; -"2068 Constant_1890" [id=2068, type=Constant]; -"2069 Gather_1891" [id=2069, type=Gather]; -"2070 Unsqueeze_1892" [id=2070, type=Unsqueeze]; -"2071 Concat_1893" [id=2071, type=Concat]; -"2072 Reshape_1894" [id=2072, type=Reshape]; -"2073 QuantizeLinear_h.8.mlp.c_fc.weight_1" [id=2073, type=QuantizeLinear]; -"2074 DequantizeLinear_h.8.mlp.c_fc.weight_1" [id=2074, type=DequantizeLinear]; -"2075 Gemm_1895" [id=2075, type=Gemm]; -"2076 Unsqueeze_1896" [id=2076, type=Unsqueeze]; -"2077 Unsqueeze_1897" [id=2077, type=Unsqueeze]; -"2078 Concat_1898" [id=2078, type=Concat]; -"2079 Reshape_1899" [id=2079, type=Reshape]; -"2080 Constant_1900" [id=2080, type=Constant]; -"2081 Mul_1901" [id=2081, type=Mul]; -"2082 Constant_1902" [id=2082, type=Constant]; -"2083 Pow_1903" [id=2083, type=Pow]; -"2084 Constant_1904" [id=2084, type=Constant]; -"2085 Mul_1905" [id=2085, type=Mul]; -"2086 Add_1906" [id=2086, type=Add]; -"2087 Constant_1907" [id=2087, type=Constant]; -"2088 Mul_1908" [id=2088, type=Mul]; -"2089 Tanh_1909" [id=2089, type=Tanh]; -"2090 Constant_1910" [id=2090, type=Constant]; -"2091 Add_1911" [id=2091, type=Add]; -"2092 Mul_1912" [id=2092, type=Mul]; -"2093 QuantizeLinear_2347_1" [id=2093, type=QuantizeLinear]; -"2094 DequantizeLinear_2347_1" [id=2094, type=DequantizeLinear]; -"2095 Shape_1913" [id=2095, type=Shape]; -"2096 Constant_1914" [id=2096, type=Constant]; -"2097 Gather_1915" [id=2097, type=Gather]; -"2098 Shape_1916" [id=2098, type=Shape]; -"2099 Constant_1917" [id=2099, type=Constant]; -"2100 Gather_1918" [id=2100, type=Gather]; -"2101 Shape_1919" [id=2101, type=Shape]; -"2102 Constant_1920" [id=2102, type=Constant]; -"2103 Gather_1921" [id=2103, 
type=Gather]; -"2104 Unsqueeze_1922" [id=2104, type=Unsqueeze]; -"2105 Concat_1923" [id=2105, type=Concat]; -"2106 Reshape_1924" [id=2106, type=Reshape]; -"2107 QuantizeLinear_h.8.mlp.c_proj.weight_1" [id=2107, type=QuantizeLinear]; -"2108 DequantizeLinear_h.8.mlp.c_proj.weight_1" [id=2108, type=DequantizeLinear]; -"2109 Gemm_1925" [id=2109, type=Gemm]; -"2110 Unsqueeze_1926" [id=2110, type=Unsqueeze]; -"2111 Unsqueeze_1927" [id=2111, type=Unsqueeze]; -"2112 Concat_1928" [id=2112, type=Concat]; -"2113 Reshape_1929" [id=2113, type=Reshape]; -"2114 Add_1930" [id=2114, type=Add]; -"2115 ReduceMean_1931" [id=2115, type=ReduceMean]; -"2116 Sub_1932" [id=2116, type=Sub]; -"2117 Constant_1933" [id=2117, type=Constant]; -"2118 Pow_1934" [id=2118, type=Pow]; -"2119 ReduceMean_1935" [id=2119, type=ReduceMean]; -"2120 Constant_1936" [id=2120, type=Constant]; -"2121 Add_1937" [id=2121, type=Add]; -"2122 Sqrt_1938" [id=2122, type=Sqrt]; -"2123 Div_1939" [id=2123, type=Div]; -"2124 Mul_1940" [id=2124, type=Mul]; -"2125 Add_1941" [id=2125, type=Add]; -"2126 QuantizeLinear_2380_1" [id=2126, type=QuantizeLinear]; -"2127 DequantizeLinear_2380_1" [id=2127, type=DequantizeLinear]; -"2128 Shape_1942" [id=2128, type=Shape]; -"2129 Constant_1943" [id=2129, type=Constant]; -"2130 Gather_1944" [id=2130, type=Gather]; -"2131 Shape_1945" [id=2131, type=Shape]; -"2132 Constant_1946" [id=2132, type=Constant]; -"2133 Gather_1947" [id=2133, type=Gather]; -"2134 Shape_1948" [id=2134, type=Shape]; -"2135 Constant_1949" [id=2135, type=Constant]; -"2136 Gather_1950" [id=2136, type=Gather]; -"2137 Unsqueeze_1951" [id=2137, type=Unsqueeze]; -"2138 Concat_1952" [id=2138, type=Concat]; -"2139 Reshape_1953" [id=2139, type=Reshape]; -"2140 QuantizeLinear_h.9.attn.c_attn.weight_1" [id=2140, type=QuantizeLinear]; -"2141 DequantizeLinear_h.9.attn.c_attn.weight_1" [id=2141, type=DequantizeLinear]; -"2142 Gemm_1954" [id=2142, type=Gemm]; -"2143 Unsqueeze_1955" [id=2143, type=Unsqueeze]; -"2144 Unsqueeze_1956" [id=2144, type=Unsqueeze]; -"2145 Concat_1957" [id=2145, type=Concat]; -"2146 Reshape_1958" [id=2146, type=Reshape]; -"2147 Split_1959" [id=2147, type=Split]; -"2148 QuantizeLinear_query.19_1" [id=2148, type=QuantizeLinear]; -"2149 DequantizeLinear_query.19_1" [id=2149, type=DequantizeLinear]; -"2150 Shape_1960" [id=2150, type=Shape]; -"2151 Constant_1961" [id=2151, type=Constant]; -"2152 Gather_1962" [id=2152, type=Gather]; -"2153 Shape_1963" [id=2153, type=Shape]; -"2154 Constant_1964" [id=2154, type=Constant]; -"2155 Gather_1965" [id=2155, type=Gather]; -"2156 Shape_1966" [id=2156, type=Shape]; -"2157 Constant_1967" [id=2157, type=Constant]; -"2158 Gather_1968" [id=2158, type=Gather]; -"2159 Constant_1969" [id=2159, type=Constant]; -"2160 Div_1970" [id=2160, type=Div]; -"2161 Cast_1971" [id=2161, type=Cast]; -"2162 Cast_1972" [id=2162, type=Cast]; -"2163 Unsqueeze_1973" [id=2163, type=Unsqueeze]; -"2164 Unsqueeze_1974" [id=2164, type=Unsqueeze]; -"2165 Unsqueeze_1975" [id=2165, type=Unsqueeze]; -"2166 Concat_1976" [id=2166, type=Concat]; -"2167 Reshape_1977" [id=2167, type=Reshape]; -"2168 Transpose_1978" [id=2168, type=Transpose]; -"2169 Shape_1979" [id=2169, type=Shape]; -"2170 Constant_1980" [id=2170, type=Constant]; -"2171 Gather_1981" [id=2171, type=Gather]; -"2172 Shape_1982" [id=2172, type=Shape]; -"2173 Constant_1983" [id=2173, type=Constant]; -"2174 Gather_1984" [id=2174, type=Gather]; -"2175 Shape_1985" [id=2175, type=Shape]; -"2176 Constant_1986" [id=2176, type=Constant]; -"2177 Gather_1987" [id=2177, type=Gather]; 
-"2178 Constant_1988" [id=2178, type=Constant]; -"2179 Div_1989" [id=2179, type=Div]; -"2180 Cast_1990" [id=2180, type=Cast]; -"2181 Cast_1991" [id=2181, type=Cast]; -"2182 Unsqueeze_1992" [id=2182, type=Unsqueeze]; -"2183 Unsqueeze_1993" [id=2183, type=Unsqueeze]; -"2184 Unsqueeze_1994" [id=2184, type=Unsqueeze]; -"2185 Concat_1995" [id=2185, type=Concat]; -"2186 Reshape_1996" [id=2186, type=Reshape]; -"2187 QuantizeLinear_2445_1" [id=2187, type=QuantizeLinear]; -"2188 DequantizeLinear_2445_1" [id=2188, type=DequantizeLinear]; -"2189 Transpose_1997" [id=2189, type=Transpose]; -"2190 Shape_1998" [id=2190, type=Shape]; -"2191 Constant_1999" [id=2191, type=Constant]; -"2192 Gather_2000" [id=2192, type=Gather]; -"2193 Shape_2001" [id=2193, type=Shape]; -"2194 Constant_2002" [id=2194, type=Constant]; -"2195 Gather_2003" [id=2195, type=Gather]; -"2196 Shape_2004" [id=2196, type=Shape]; -"2197 Constant_2005" [id=2197, type=Constant]; -"2198 Gather_2006" [id=2198, type=Gather]; -"2199 Constant_2007" [id=2199, type=Constant]; -"2200 Div_2008" [id=2200, type=Div]; -"2201 Cast_2009" [id=2201, type=Cast]; -"2202 Cast_2010" [id=2202, type=Cast]; -"2203 Unsqueeze_2011" [id=2203, type=Unsqueeze]; -"2204 Unsqueeze_2012" [id=2204, type=Unsqueeze]; -"2205 Unsqueeze_2013" [id=2205, type=Unsqueeze]; -"2206 Concat_2014" [id=2206, type=Concat]; -"2207 Reshape_2015" [id=2207, type=Reshape]; -"2208 Transpose_2016" [id=2208, type=Transpose]; -"2209 Transpose_2017" [id=2209, type=Transpose]; -"2210 Unsqueeze_2018" [id=2210, type=Unsqueeze]; -"2211 Unsqueeze_2019" [id=2211, type=Unsqueeze]; -"2212 Concat_2020" [id=2212, type=Concat]; -"2213 MatMul_2021" [id=2213, type=MatMul]; -"2214 Constant_2022" [id=2214, type=Constant]; -"2215 Div_2023" [id=2215, type=Div]; -"2216 Shape_2024" [id=2216, type=Shape]; -"2217 Constant_2025" [id=2217, type=Constant]; -"2218 Gather_2026" [id=2218, type=Gather]; -"2219 Shape_2027" [id=2219, type=Shape]; -"2220 Constant_2028" [id=2220, type=Constant]; -"2221 Gather_2029" [id=2221, type=Gather]; -"2222 Sub_2030" [id=2222, type=Sub]; -"2223 Unsqueeze_2031" [id=2223, type=Unsqueeze]; -"2224 Unsqueeze_2032" [id=2224, type=Unsqueeze]; -"2225 Constant_2033" [id=2225, type=Constant]; -"2226 Slice_2034" [id=2226, type=Slice]; -"2227 Unsqueeze_2035" [id=2227, type=Unsqueeze]; -"2228 Constant_2036" [id=2228, type=Constant]; -"2229 Slice_2037" [id=2229, type=Slice]; -"2230 Mul_2038" [id=2230, type=Mul]; -"2231 Constant_2039" [id=2231, type=Constant]; -"2232 Sub_2040" [id=2232, type=Sub]; -"2233 Constant_2041" [id=2233, type=Constant]; -"2234 Mul_2042" [id=2234, type=Mul]; -"2235 Sub_2043" [id=2235, type=Sub]; -"2236 Softmax_2044" [id=2236, type=Softmax]; -"2237 MatMul_2045" [id=2237, type=MatMul]; -"2238 QuantizeLinear_2502_1" [id=2238, type=QuantizeLinear]; -"2239 DequantizeLinear_2502_1" [id=2239, type=DequantizeLinear]; -"2240 Transpose_2046" [id=2240, type=Transpose]; -"2241 Shape_2047" [id=2241, type=Shape]; -"2242 Constant_2048" [id=2242, type=Constant]; -"2243 Gather_2049" [id=2243, type=Gather]; -"2244 Shape_2050" [id=2244, type=Shape]; -"2245 Constant_2051" [id=2245, type=Constant]; -"2246 Gather_2052" [id=2246, type=Gather]; -"2247 Shape_2053" [id=2247, type=Shape]; -"2248 Constant_2054" [id=2248, type=Constant]; -"2249 Gather_2055" [id=2249, type=Gather]; -"2250 Shape_2056" [id=2250, type=Shape]; -"2251 Constant_2057" [id=2251, type=Constant]; -"2252 Gather_2058" [id=2252, type=Gather]; -"2253 Mul_2059" [id=2253, type=Mul]; -"2254 Unsqueeze_2060" [id=2254, type=Unsqueeze]; -"2255 
Unsqueeze_2061" [id=2255, type=Unsqueeze]; -"2256 Unsqueeze_2062" [id=2256, type=Unsqueeze]; -"2257 Concat_2063" [id=2257, type=Concat]; -"2258 Reshape_2064" [id=2258, type=Reshape]; -"2259 Shape_2065" [id=2259, type=Shape]; -"2260 Constant_2066" [id=2260, type=Constant]; -"2261 Gather_2067" [id=2261, type=Gather]; -"2262 Shape_2068" [id=2262, type=Shape]; -"2263 Constant_2069" [id=2263, type=Constant]; -"2264 Gather_2070" [id=2264, type=Gather]; -"2265 Shape_2071" [id=2265, type=Shape]; -"2266 Constant_2072" [id=2266, type=Constant]; -"2267 Gather_2073" [id=2267, type=Gather]; -"2268 Unsqueeze_2074" [id=2268, type=Unsqueeze]; -"2269 Concat_2075" [id=2269, type=Concat]; -"2270 Reshape_2076" [id=2270, type=Reshape]; -"2271 QuantizeLinear_h.9.attn.c_proj.weight_1" [id=2271, type=QuantizeLinear]; -"2272 DequantizeLinear_h.9.attn.c_proj.weight_1" [id=2272, type=DequantizeLinear]; -"2273 Gemm_2077" [id=2273, type=Gemm]; -"2274 Unsqueeze_2078" [id=2274, type=Unsqueeze]; -"2275 Unsqueeze_2079" [id=2275, type=Unsqueeze]; -"2276 Concat_2080" [id=2276, type=Concat]; -"2277 Reshape_2081" [id=2277, type=Reshape]; -"2278 Add_2082" [id=2278, type=Add]; -"2279 ReduceMean_2083" [id=2279, type=ReduceMean]; -"2280 Sub_2084" [id=2280, type=Sub]; -"2281 Constant_2085" [id=2281, type=Constant]; -"2282 Pow_2086" [id=2282, type=Pow]; -"2283 ReduceMean_2087" [id=2283, type=ReduceMean]; -"2284 Constant_2088" [id=2284, type=Constant]; -"2285 Add_2089" [id=2285, type=Add]; -"2286 Sqrt_2090" [id=2286, type=Sqrt]; -"2287 Div_2091" [id=2287, type=Div]; -"2288 Mul_2092" [id=2288, type=Mul]; -"2289 Add_2093" [id=2289, type=Add]; -"2290 QuantizeLinear_2554_1" [id=2290, type=QuantizeLinear]; -"2291 DequantizeLinear_2554_1" [id=2291, type=DequantizeLinear]; -"2292 Shape_2094" [id=2292, type=Shape]; -"2293 Constant_2095" [id=2293, type=Constant]; -"2294 Gather_2096" [id=2294, type=Gather]; -"2295 Shape_2097" [id=2295, type=Shape]; -"2296 Constant_2098" [id=2296, type=Constant]; -"2297 Gather_2099" [id=2297, type=Gather]; -"2298 Shape_2100" [id=2298, type=Shape]; -"2299 Constant_2101" [id=2299, type=Constant]; -"2300 Gather_2102" [id=2300, type=Gather]; -"2301 Unsqueeze_2103" [id=2301, type=Unsqueeze]; -"2302 Concat_2104" [id=2302, type=Concat]; -"2303 Reshape_2105" [id=2303, type=Reshape]; -"2304 QuantizeLinear_h.9.mlp.c_fc.weight_1" [id=2304, type=QuantizeLinear]; -"2305 DequantizeLinear_h.9.mlp.c_fc.weight_1" [id=2305, type=DequantizeLinear]; -"2306 Gemm_2106" [id=2306, type=Gemm]; -"2307 Unsqueeze_2107" [id=2307, type=Unsqueeze]; -"2308 Unsqueeze_2108" [id=2308, type=Unsqueeze]; -"2309 Concat_2109" [id=2309, type=Concat]; -"2310 Reshape_2110" [id=2310, type=Reshape]; -"2311 Constant_2111" [id=2311, type=Constant]; -"2312 Mul_2112" [id=2312, type=Mul]; -"2313 Constant_2113" [id=2313, type=Constant]; -"2314 Pow_2114" [id=2314, type=Pow]; -"2315 Constant_2115" [id=2315, type=Constant]; -"2316 Mul_2116" [id=2316, type=Mul]; -"2317 Add_2117" [id=2317, type=Add]; -"2318 Constant_2118" [id=2318, type=Constant]; -"2319 Mul_2119" [id=2319, type=Mul]; -"2320 Tanh_2120" [id=2320, type=Tanh]; -"2321 Constant_2121" [id=2321, type=Constant]; -"2322 Add_2122" [id=2322, type=Add]; -"2323 Mul_2123" [id=2323, type=Mul]; -"2324 QuantizeLinear_2588_1" [id=2324, type=QuantizeLinear]; -"2325 DequantizeLinear_2588_1" [id=2325, type=DequantizeLinear]; -"2326 Shape_2124" [id=2326, type=Shape]; -"2327 Constant_2125" [id=2327, type=Constant]; -"2328 Gather_2126" [id=2328, type=Gather]; -"2329 Shape_2127" [id=2329, type=Shape]; -"2330 Constant_2128" 
[id=2330, type=Constant]; -"2331 Gather_2129" [id=2331, type=Gather]; -"2332 Shape_2130" [id=2332, type=Shape]; -"2333 Constant_2131" [id=2333, type=Constant]; -"2334 Gather_2132" [id=2334, type=Gather]; -"2335 Unsqueeze_2133" [id=2335, type=Unsqueeze]; -"2336 Concat_2134" [id=2336, type=Concat]; -"2337 Reshape_2135" [id=2337, type=Reshape]; -"2338 QuantizeLinear_h.9.mlp.c_proj.weight_1" [id=2338, type=QuantizeLinear]; -"2339 DequantizeLinear_h.9.mlp.c_proj.weight_1" [id=2339, type=DequantizeLinear]; -"2340 Gemm_2136" [id=2340, type=Gemm]; -"2341 Unsqueeze_2137" [id=2341, type=Unsqueeze]; -"2342 Unsqueeze_2138" [id=2342, type=Unsqueeze]; -"2343 Concat_2139" [id=2343, type=Concat]; -"2344 Reshape_2140" [id=2344, type=Reshape]; -"2345 Add_2141" [id=2345, type=Add]; -"2346 ReduceMean_2142" [id=2346, type=ReduceMean]; -"2347 Sub_2143" [id=2347, type=Sub]; -"2348 Constant_2144" [id=2348, type=Constant]; -"2349 Pow_2145" [id=2349, type=Pow]; -"2350 ReduceMean_2146" [id=2350, type=ReduceMean]; -"2351 Constant_2147" [id=2351, type=Constant]; -"2352 Add_2148" [id=2352, type=Add]; -"2353 Sqrt_2149" [id=2353, type=Sqrt]; -"2354 Div_2150" [id=2354, type=Div]; -"2355 Mul_2151" [id=2355, type=Mul]; -"2356 Add_2152" [id=2356, type=Add]; -"2357 QuantizeLinear_2621_1" [id=2357, type=QuantizeLinear]; -"2358 DequantizeLinear_2621_1" [id=2358, type=DequantizeLinear]; -"2359 Shape_2153" [id=2359, type=Shape]; -"2360 Constant_2154" [id=2360, type=Constant]; -"2361 Gather_2155" [id=2361, type=Gather]; -"2362 Shape_2156" [id=2362, type=Shape]; -"2363 Constant_2157" [id=2363, type=Constant]; -"2364 Gather_2158" [id=2364, type=Gather]; -"2365 Shape_2159" [id=2365, type=Shape]; -"2366 Constant_2160" [id=2366, type=Constant]; -"2367 Gather_2161" [id=2367, type=Gather]; -"2368 Unsqueeze_2162" [id=2368, type=Unsqueeze]; -"2369 Concat_2163" [id=2369, type=Concat]; -"2370 Reshape_2164" [id=2370, type=Reshape]; -"2371 QuantizeLinear_h.10.attn.c_attn.weight_1" [id=2371, type=QuantizeLinear]; -"2372 DequantizeLinear_h.10.attn.c_attn.weight_1" [id=2372, type=DequantizeLinear]; -"2373 Gemm_2165" [id=2373, type=Gemm]; -"2374 Unsqueeze_2166" [id=2374, type=Unsqueeze]; -"2375 Unsqueeze_2167" [id=2375, type=Unsqueeze]; -"2376 Concat_2168" [id=2376, type=Concat]; -"2377 Reshape_2169" [id=2377, type=Reshape]; -"2378 Split_2170" [id=2378, type=Split]; -"2379 QuantizeLinear_query.21_1" [id=2379, type=QuantizeLinear]; -"2380 DequantizeLinear_query.21_1" [id=2380, type=DequantizeLinear]; -"2381 Shape_2171" [id=2381, type=Shape]; -"2382 Constant_2172" [id=2382, type=Constant]; -"2383 Gather_2173" [id=2383, type=Gather]; -"2384 Shape_2174" [id=2384, type=Shape]; -"2385 Constant_2175" [id=2385, type=Constant]; -"2386 Gather_2176" [id=2386, type=Gather]; -"2387 Shape_2177" [id=2387, type=Shape]; -"2388 Constant_2178" [id=2388, type=Constant]; -"2389 Gather_2179" [id=2389, type=Gather]; -"2390 Constant_2180" [id=2390, type=Constant]; -"2391 Div_2181" [id=2391, type=Div]; -"2392 Cast_2182" [id=2392, type=Cast]; -"2393 Cast_2183" [id=2393, type=Cast]; -"2394 Unsqueeze_2184" [id=2394, type=Unsqueeze]; -"2395 Unsqueeze_2185" [id=2395, type=Unsqueeze]; -"2396 Unsqueeze_2186" [id=2396, type=Unsqueeze]; -"2397 Concat_2187" [id=2397, type=Concat]; -"2398 Reshape_2188" [id=2398, type=Reshape]; -"2399 Transpose_2189" [id=2399, type=Transpose]; -"2400 Shape_2190" [id=2400, type=Shape]; -"2401 Constant_2191" [id=2401, type=Constant]; -"2402 Gather_2192" [id=2402, type=Gather]; -"2403 Shape_2193" [id=2403, type=Shape]; -"2404 Constant_2194" [id=2404, 
type=Constant]; -"2405 Gather_2195" [id=2405, type=Gather]; -"2406 Shape_2196" [id=2406, type=Shape]; -"2407 Constant_2197" [id=2407, type=Constant]; -"2408 Gather_2198" [id=2408, type=Gather]; -"2409 Constant_2199" [id=2409, type=Constant]; -"2410 Div_2200" [id=2410, type=Div]; -"2411 Cast_2201" [id=2411, type=Cast]; -"2412 Cast_2202" [id=2412, type=Cast]; -"2413 Unsqueeze_2203" [id=2413, type=Unsqueeze]; -"2414 Unsqueeze_2204" [id=2414, type=Unsqueeze]; -"2415 Unsqueeze_2205" [id=2415, type=Unsqueeze]; -"2416 Concat_2206" [id=2416, type=Concat]; -"2417 Reshape_2207" [id=2417, type=Reshape]; -"2418 QuantizeLinear_2686_1" [id=2418, type=QuantizeLinear]; -"2419 DequantizeLinear_2686_1" [id=2419, type=DequantizeLinear]; -"2420 Transpose_2208" [id=2420, type=Transpose]; -"2421 Shape_2209" [id=2421, type=Shape]; -"2422 Constant_2210" [id=2422, type=Constant]; -"2423 Gather_2211" [id=2423, type=Gather]; -"2424 Shape_2212" [id=2424, type=Shape]; -"2425 Constant_2213" [id=2425, type=Constant]; -"2426 Gather_2214" [id=2426, type=Gather]; -"2427 Shape_2215" [id=2427, type=Shape]; -"2428 Constant_2216" [id=2428, type=Constant]; -"2429 Gather_2217" [id=2429, type=Gather]; -"2430 Constant_2218" [id=2430, type=Constant]; -"2431 Div_2219" [id=2431, type=Div]; -"2432 Cast_2220" [id=2432, type=Cast]; -"2433 Cast_2221" [id=2433, type=Cast]; -"2434 Unsqueeze_2222" [id=2434, type=Unsqueeze]; -"2435 Unsqueeze_2223" [id=2435, type=Unsqueeze]; -"2436 Unsqueeze_2224" [id=2436, type=Unsqueeze]; -"2437 Concat_2225" [id=2437, type=Concat]; -"2438 Reshape_2226" [id=2438, type=Reshape]; -"2439 Transpose_2227" [id=2439, type=Transpose]; -"2440 Transpose_2228" [id=2440, type=Transpose]; -"2441 Unsqueeze_2229" [id=2441, type=Unsqueeze]; -"2442 Unsqueeze_2230" [id=2442, type=Unsqueeze]; -"2443 Concat_2231" [id=2443, type=Concat]; -"2444 MatMul_2232" [id=2444, type=MatMul]; -"2445 Constant_2233" [id=2445, type=Constant]; -"2446 Div_2234" [id=2446, type=Div]; -"2447 Shape_2235" [id=2447, type=Shape]; -"2448 Constant_2236" [id=2448, type=Constant]; -"2449 Gather_2237" [id=2449, type=Gather]; -"2450 Shape_2238" [id=2450, type=Shape]; -"2451 Constant_2239" [id=2451, type=Constant]; -"2452 Gather_2240" [id=2452, type=Gather]; -"2453 Sub_2241" [id=2453, type=Sub]; -"2454 Unsqueeze_2242" [id=2454, type=Unsqueeze]; -"2455 Unsqueeze_2243" [id=2455, type=Unsqueeze]; -"2456 Constant_2244" [id=2456, type=Constant]; -"2457 Slice_2245" [id=2457, type=Slice]; -"2458 Unsqueeze_2246" [id=2458, type=Unsqueeze]; -"2459 Constant_2247" [id=2459, type=Constant]; -"2460 Slice_2248" [id=2460, type=Slice]; -"2461 Mul_2249" [id=2461, type=Mul]; -"2462 Constant_2250" [id=2462, type=Constant]; -"2463 Sub_2251" [id=2463, type=Sub]; -"2464 Constant_2252" [id=2464, type=Constant]; -"2465 Mul_2253" [id=2465, type=Mul]; -"2466 Sub_2254" [id=2466, type=Sub]; -"2467 Softmax_2255" [id=2467, type=Softmax]; -"2468 MatMul_2256" [id=2468, type=MatMul]; -"2469 QuantizeLinear_2743_1" [id=2469, type=QuantizeLinear]; -"2470 DequantizeLinear_2743_1" [id=2470, type=DequantizeLinear]; -"2471 Transpose_2257" [id=2471, type=Transpose]; -"2472 Shape_2258" [id=2472, type=Shape]; -"2473 Constant_2259" [id=2473, type=Constant]; -"2474 Gather_2260" [id=2474, type=Gather]; -"2475 Shape_2261" [id=2475, type=Shape]; -"2476 Constant_2262" [id=2476, type=Constant]; -"2477 Gather_2263" [id=2477, type=Gather]; -"2478 Shape_2264" [id=2478, type=Shape]; -"2479 Constant_2265" [id=2479, type=Constant]; -"2480 Gather_2266" [id=2480, type=Gather]; -"2481 Shape_2267" [id=2481, 
type=Shape]; -"2482 Constant_2268" [id=2482, type=Constant]; -"2483 Gather_2269" [id=2483, type=Gather]; -"2484 Mul_2270" [id=2484, type=Mul]; -"2485 Unsqueeze_2271" [id=2485, type=Unsqueeze]; -"2486 Unsqueeze_2272" [id=2486, type=Unsqueeze]; -"2487 Unsqueeze_2273" [id=2487, type=Unsqueeze]; -"2488 Concat_2274" [id=2488, type=Concat]; -"2489 Reshape_2275" [id=2489, type=Reshape]; -"2490 Shape_2276" [id=2490, type=Shape]; -"2491 Constant_2277" [id=2491, type=Constant]; -"2492 Gather_2278" [id=2492, type=Gather]; -"2493 Shape_2279" [id=2493, type=Shape]; -"2494 Constant_2280" [id=2494, type=Constant]; -"2495 Gather_2281" [id=2495, type=Gather]; -"2496 Shape_2282" [id=2496, type=Shape]; -"2497 Constant_2283" [id=2497, type=Constant]; -"2498 Gather_2284" [id=2498, type=Gather]; -"2499 Unsqueeze_2285" [id=2499, type=Unsqueeze]; -"2500 Concat_2286" [id=2500, type=Concat]; -"2501 Reshape_2287" [id=2501, type=Reshape]; -"2502 QuantizeLinear_h.10.attn.c_proj.weight_1" [id=2502, type=QuantizeLinear]; -"2503 DequantizeLinear_h.10.attn.c_proj.weight_1" [id=2503, type=DequantizeLinear]; -"2504 Gemm_2288" [id=2504, type=Gemm]; -"2505 Unsqueeze_2289" [id=2505, type=Unsqueeze]; -"2506 Unsqueeze_2290" [id=2506, type=Unsqueeze]; -"2507 Concat_2291" [id=2507, type=Concat]; -"2508 Reshape_2292" [id=2508, type=Reshape]; -"2509 Add_2293" [id=2509, type=Add]; -"2510 ReduceMean_2294" [id=2510, type=ReduceMean]; -"2511 Sub_2295" [id=2511, type=Sub]; -"2512 Constant_2296" [id=2512, type=Constant]; -"2513 Pow_2297" [id=2513, type=Pow]; -"2514 ReduceMean_2298" [id=2514, type=ReduceMean]; -"2515 Constant_2299" [id=2515, type=Constant]; -"2516 Add_2300" [id=2516, type=Add]; -"2517 Sqrt_2301" [id=2517, type=Sqrt]; -"2518 Div_2302" [id=2518, type=Div]; -"2519 Mul_2303" [id=2519, type=Mul]; -"2520 Add_2304" [id=2520, type=Add]; -"2521 QuantizeLinear_2795_1" [id=2521, type=QuantizeLinear]; -"2522 DequantizeLinear_2795_1" [id=2522, type=DequantizeLinear]; -"2523 Shape_2305" [id=2523, type=Shape]; -"2524 Constant_2306" [id=2524, type=Constant]; -"2525 Gather_2307" [id=2525, type=Gather]; -"2526 Shape_2308" [id=2526, type=Shape]; -"2527 Constant_2309" [id=2527, type=Constant]; -"2528 Gather_2310" [id=2528, type=Gather]; -"2529 Shape_2311" [id=2529, type=Shape]; -"2530 Constant_2312" [id=2530, type=Constant]; -"2531 Gather_2313" [id=2531, type=Gather]; -"2532 Unsqueeze_2314" [id=2532, type=Unsqueeze]; -"2533 Concat_2315" [id=2533, type=Concat]; -"2534 Reshape_2316" [id=2534, type=Reshape]; -"2535 QuantizeLinear_h.10.mlp.c_fc.weight_1" [id=2535, type=QuantizeLinear]; -"2536 DequantizeLinear_h.10.mlp.c_fc.weight_1" [id=2536, type=DequantizeLinear]; -"2537 Gemm_2317" [id=2537, type=Gemm]; -"2538 Unsqueeze_2318" [id=2538, type=Unsqueeze]; -"2539 Unsqueeze_2319" [id=2539, type=Unsqueeze]; -"2540 Concat_2320" [id=2540, type=Concat]; -"2541 Reshape_2321" [id=2541, type=Reshape]; -"2542 Constant_2322" [id=2542, type=Constant]; -"2543 Mul_2323" [id=2543, type=Mul]; -"2544 Constant_2324" [id=2544, type=Constant]; -"2545 Pow_2325" [id=2545, type=Pow]; -"2546 Constant_2326" [id=2546, type=Constant]; -"2547 Mul_2327" [id=2547, type=Mul]; -"2548 Add_2328" [id=2548, type=Add]; -"2549 Constant_2329" [id=2549, type=Constant]; -"2550 Mul_2330" [id=2550, type=Mul]; -"2551 Tanh_2331" [id=2551, type=Tanh]; -"2552 Constant_2332" [id=2552, type=Constant]; -"2553 Add_2333" [id=2553, type=Add]; -"2554 Mul_2334" [id=2554, type=Mul]; -"2555 QuantizeLinear_2829_1" [id=2555, type=QuantizeLinear]; -"2556 DequantizeLinear_2829_1" [id=2556, 
type=DequantizeLinear]; -"2557 Shape_2335" [id=2557, type=Shape]; -"2558 Constant_2336" [id=2558, type=Constant]; -"2559 Gather_2337" [id=2559, type=Gather]; -"2560 Shape_2338" [id=2560, type=Shape]; -"2561 Constant_2339" [id=2561, type=Constant]; -"2562 Gather_2340" [id=2562, type=Gather]; -"2563 Shape_2341" [id=2563, type=Shape]; -"2564 Constant_2342" [id=2564, type=Constant]; -"2565 Gather_2343" [id=2565, type=Gather]; -"2566 Unsqueeze_2344" [id=2566, type=Unsqueeze]; -"2567 Concat_2345" [id=2567, type=Concat]; -"2568 Reshape_2346" [id=2568, type=Reshape]; -"2569 QuantizeLinear_h.10.mlp.c_proj.weight_1" [id=2569, type=QuantizeLinear]; -"2570 DequantizeLinear_h.10.mlp.c_proj.weight_1" [id=2570, type=DequantizeLinear]; -"2571 Gemm_2347" [id=2571, type=Gemm]; -"2572 Unsqueeze_2348" [id=2572, type=Unsqueeze]; -"2573 Unsqueeze_2349" [id=2573, type=Unsqueeze]; -"2574 Concat_2350" [id=2574, type=Concat]; -"2575 Reshape_2351" [id=2575, type=Reshape]; -"2576 Add_2352" [id=2576, type=Add]; -"2577 ReduceMean_2353" [id=2577, type=ReduceMean]; -"2578 Sub_2354" [id=2578, type=Sub]; -"2579 Constant_2355" [id=2579, type=Constant]; -"2580 Pow_2356" [id=2580, type=Pow]; -"2581 ReduceMean_2357" [id=2581, type=ReduceMean]; -"2582 Constant_2358" [id=2582, type=Constant]; -"2583 Add_2359" [id=2583, type=Add]; -"2584 Sqrt_2360" [id=2584, type=Sqrt]; -"2585 Div_2361" [id=2585, type=Div]; -"2586 Mul_2362" [id=2586, type=Mul]; -"2587 Add_2363" [id=2587, type=Add]; -"2588 QuantizeLinear_2862_1" [id=2588, type=QuantizeLinear]; -"2589 DequantizeLinear_2862_1" [id=2589, type=DequantizeLinear]; -"2590 Shape_2364" [id=2590, type=Shape]; -"2591 Constant_2365" [id=2591, type=Constant]; -"2592 Gather_2366" [id=2592, type=Gather]; -"2593 Shape_2367" [id=2593, type=Shape]; -"2594 Constant_2368" [id=2594, type=Constant]; -"2595 Gather_2369" [id=2595, type=Gather]; -"2596 Shape_2370" [id=2596, type=Shape]; -"2597 Constant_2371" [id=2597, type=Constant]; -"2598 Gather_2372" [id=2598, type=Gather]; -"2599 Unsqueeze_2373" [id=2599, type=Unsqueeze]; -"2600 Concat_2374" [id=2600, type=Concat]; -"2601 Reshape_2375" [id=2601, type=Reshape]; -"2602 QuantizeLinear_h.11.attn.c_attn.weight_1" [id=2602, type=QuantizeLinear]; -"2603 DequantizeLinear_h.11.attn.c_attn.weight_1" [id=2603, type=DequantizeLinear]; -"2604 Gemm_2376" [id=2604, type=Gemm]; -"2605 Unsqueeze_2377" [id=2605, type=Unsqueeze]; -"2606 Unsqueeze_2378" [id=2606, type=Unsqueeze]; -"2607 Concat_2379" [id=2607, type=Concat]; -"2608 Reshape_2380" [id=2608, type=Reshape]; -"2609 Split_2381" [id=2609, type=Split]; -"2610 QuantizeLinear_query.23_1" [id=2610, type=QuantizeLinear]; -"2611 DequantizeLinear_query.23_1" [id=2611, type=DequantizeLinear]; -"2612 Shape_2382" [id=2612, type=Shape]; -"2613 Constant_2383" [id=2613, type=Constant]; -"2614 Gather_2384" [id=2614, type=Gather]; -"2615 Shape_2385" [id=2615, type=Shape]; -"2616 Constant_2386" [id=2616, type=Constant]; -"2617 Gather_2387" [id=2617, type=Gather]; -"2618 Shape_2388" [id=2618, type=Shape]; -"2619 Constant_2389" [id=2619, type=Constant]; -"2620 Gather_2390" [id=2620, type=Gather]; -"2621 Constant_2391" [id=2621, type=Constant]; -"2622 Div_2392" [id=2622, type=Div]; -"2623 Cast_2393" [id=2623, type=Cast]; -"2624 Cast_2394" [id=2624, type=Cast]; -"2625 Unsqueeze_2395" [id=2625, type=Unsqueeze]; -"2626 Unsqueeze_2396" [id=2626, type=Unsqueeze]; -"2627 Unsqueeze_2397" [id=2627, type=Unsqueeze]; -"2628 Concat_2398" [id=2628, type=Concat]; -"2629 Reshape_2399" [id=2629, type=Reshape]; -"2630 Transpose_2400" [id=2630, 
type=Transpose]; -"2631 Shape_2401" [id=2631, type=Shape]; -"2632 Constant_2402" [id=2632, type=Constant]; -"2633 Gather_2403" [id=2633, type=Gather]; -"2634 Shape_2404" [id=2634, type=Shape]; -"2635 Constant_2405" [id=2635, type=Constant]; -"2636 Gather_2406" [id=2636, type=Gather]; -"2637 Shape_2407" [id=2637, type=Shape]; -"2638 Constant_2408" [id=2638, type=Constant]; -"2639 Gather_2409" [id=2639, type=Gather]; -"2640 Constant_2410" [id=2640, type=Constant]; -"2641 Div_2411" [id=2641, type=Div]; -"2642 Cast_2412" [id=2642, type=Cast]; -"2643 Cast_2413" [id=2643, type=Cast]; -"2644 Unsqueeze_2414" [id=2644, type=Unsqueeze]; -"2645 Unsqueeze_2415" [id=2645, type=Unsqueeze]; -"2646 Unsqueeze_2416" [id=2646, type=Unsqueeze]; -"2647 Concat_2417" [id=2647, type=Concat]; -"2648 Reshape_2418" [id=2648, type=Reshape]; -"2649 QuantizeLinear_2927_1" [id=2649, type=QuantizeLinear]; -"2650 DequantizeLinear_2927_1" [id=2650, type=DequantizeLinear]; -"2651 Transpose_2419" [id=2651, type=Transpose]; -"2652 Shape_2420" [id=2652, type=Shape]; -"2653 Constant_2421" [id=2653, type=Constant]; -"2654 Gather_2422" [id=2654, type=Gather]; -"2655 Shape_2423" [id=2655, type=Shape]; -"2656 Constant_2424" [id=2656, type=Constant]; -"2657 Gather_2425" [id=2657, type=Gather]; -"2658 Shape_2426" [id=2658, type=Shape]; -"2659 Constant_2427" [id=2659, type=Constant]; -"2660 Gather_2428" [id=2660, type=Gather]; -"2661 Constant_2429" [id=2661, type=Constant]; -"2662 Div_2430" [id=2662, type=Div]; -"2663 Cast_2431" [id=2663, type=Cast]; -"2664 Cast_2432" [id=2664, type=Cast]; -"2665 Unsqueeze_2433" [id=2665, type=Unsqueeze]; -"2666 Unsqueeze_2434" [id=2666, type=Unsqueeze]; -"2667 Unsqueeze_2435" [id=2667, type=Unsqueeze]; -"2668 Concat_2436" [id=2668, type=Concat]; -"2669 Reshape_2437" [id=2669, type=Reshape]; -"2670 Transpose_2438" [id=2670, type=Transpose]; -"2671 Transpose_2439" [id=2671, type=Transpose]; -"2672 Unsqueeze_2440" [id=2672, type=Unsqueeze]; -"2673 Unsqueeze_2441" [id=2673, type=Unsqueeze]; -"2674 Concat_2442" [id=2674, type=Concat]; -"2675 MatMul_2443" [id=2675, type=MatMul]; -"2676 Constant_2444" [id=2676, type=Constant]; -"2677 Div_2445" [id=2677, type=Div]; -"2678 Shape_2446" [id=2678, type=Shape]; -"2679 Constant_2447" [id=2679, type=Constant]; -"2680 Gather_2448" [id=2680, type=Gather]; -"2681 Shape_2449" [id=2681, type=Shape]; -"2682 Constant_2450" [id=2682, type=Constant]; -"2683 Gather_2451" [id=2683, type=Gather]; -"2684 Sub_2452" [id=2684, type=Sub]; -"2685 Unsqueeze_2453" [id=2685, type=Unsqueeze]; -"2686 Unsqueeze_2454" [id=2686, type=Unsqueeze]; -"2687 Constant_2455" [id=2687, type=Constant]; -"2688 Slice_2456" [id=2688, type=Slice]; -"2689 Unsqueeze_2457" [id=2689, type=Unsqueeze]; -"2690 Constant_2458" [id=2690, type=Constant]; -"2691 Slice_2459" [id=2691, type=Slice]; -"2692 Mul_2460" [id=2692, type=Mul]; -"2693 Constant_2461" [id=2693, type=Constant]; -"2694 Sub_2462" [id=2694, type=Sub]; -"2695 Constant_2463" [id=2695, type=Constant]; -"2696 Mul_2464" [id=2696, type=Mul]; -"2697 Sub_2465" [id=2697, type=Sub]; -"2698 Softmax_2466" [id=2698, type=Softmax]; -"2699 MatMul_2467" [id=2699, type=MatMul]; -"2700 QuantizeLinear_2984_1" [id=2700, type=QuantizeLinear]; -"2701 DequantizeLinear_2984_1" [id=2701, type=DequantizeLinear]; -"2702 Transpose_2468" [id=2702, type=Transpose]; -"2703 Shape_2469" [id=2703, type=Shape]; -"2704 Constant_2470" [id=2704, type=Constant]; -"2705 Gather_2471" [id=2705, type=Gather]; -"2706 Shape_2472" [id=2706, type=Shape]; -"2707 Constant_2473" [id=2707, 
type=Constant]; -"2708 Gather_2474" [id=2708, type=Gather]; -"2709 Shape_2475" [id=2709, type=Shape]; -"2710 Constant_2476" [id=2710, type=Constant]; -"2711 Gather_2477" [id=2711, type=Gather]; -"2712 Shape_2478" [id=2712, type=Shape]; -"2713 Constant_2479" [id=2713, type=Constant]; -"2714 Gather_2480" [id=2714, type=Gather]; -"2715 Mul_2481" [id=2715, type=Mul]; -"2716 Unsqueeze_2482" [id=2716, type=Unsqueeze]; -"2717 Unsqueeze_2483" [id=2717, type=Unsqueeze]; -"2718 Unsqueeze_2484" [id=2718, type=Unsqueeze]; -"2719 Concat_2485" [id=2719, type=Concat]; -"2720 Reshape_2486" [id=2720, type=Reshape]; -"2721 Shape_2487" [id=2721, type=Shape]; -"2722 Constant_2488" [id=2722, type=Constant]; -"2723 Gather_2489" [id=2723, type=Gather]; -"2724 Shape_2490" [id=2724, type=Shape]; -"2725 Constant_2491" [id=2725, type=Constant]; -"2726 Gather_2492" [id=2726, type=Gather]; -"2727 Shape_2493" [id=2727, type=Shape]; -"2728 Constant_2494" [id=2728, type=Constant]; -"2729 Gather_2495" [id=2729, type=Gather]; -"2730 Unsqueeze_2496" [id=2730, type=Unsqueeze]; -"2731 Concat_2497" [id=2731, type=Concat]; -"2732 Reshape_2498" [id=2732, type=Reshape]; -"2733 QuantizeLinear_h.11.attn.c_proj.weight_1" [id=2733, type=QuantizeLinear]; -"2734 DequantizeLinear_h.11.attn.c_proj.weight_1" [id=2734, type=DequantizeLinear]; -"2735 Gemm_2499" [id=2735, type=Gemm]; -"2736 Unsqueeze_2500" [id=2736, type=Unsqueeze]; -"2737 Unsqueeze_2501" [id=2737, type=Unsqueeze]; -"2738 Concat_2502" [id=2738, type=Concat]; -"2739 Reshape_2503" [id=2739, type=Reshape]; -"2740 Add_2504" [id=2740, type=Add]; -"2741 ReduceMean_2505" [id=2741, type=ReduceMean]; -"2742 Sub_2506" [id=2742, type=Sub]; -"2743 Constant_2507" [id=2743, type=Constant]; -"2744 Pow_2508" [id=2744, type=Pow]; -"2745 ReduceMean_2509" [id=2745, type=ReduceMean]; -"2746 Constant_2510" [id=2746, type=Constant]; -"2747 Add_2511" [id=2747, type=Add]; -"2748 Sqrt_2512" [id=2748, type=Sqrt]; -"2749 Div_2513" [id=2749, type=Div]; -"2750 Mul_2514" [id=2750, type=Mul]; -"2751 Add_2515" [id=2751, type=Add]; -"2752 QuantizeLinear_3036_1" [id=2752, type=QuantizeLinear]; -"2753 DequantizeLinear_3036_1" [id=2753, type=DequantizeLinear]; -"2754 Shape_2516" [id=2754, type=Shape]; -"2755 Constant_2517" [id=2755, type=Constant]; -"2756 Gather_2518" [id=2756, type=Gather]; -"2757 Shape_2519" [id=2757, type=Shape]; -"2758 Constant_2520" [id=2758, type=Constant]; -"2759 Gather_2521" [id=2759, type=Gather]; -"2760 Shape_2522" [id=2760, type=Shape]; -"2761 Constant_2523" [id=2761, type=Constant]; -"2762 Gather_2524" [id=2762, type=Gather]; -"2763 Unsqueeze_2525" [id=2763, type=Unsqueeze]; -"2764 Concat_2526" [id=2764, type=Concat]; -"2765 Reshape_2527" [id=2765, type=Reshape]; -"2766 QuantizeLinear_h.11.mlp.c_fc.weight_1" [id=2766, type=QuantizeLinear]; -"2767 DequantizeLinear_h.11.mlp.c_fc.weight_1" [id=2767, type=DequantizeLinear]; -"2768 Gemm_2528" [id=2768, type=Gemm]; -"2769 Unsqueeze_2529" [id=2769, type=Unsqueeze]; -"2770 Unsqueeze_2530" [id=2770, type=Unsqueeze]; -"2771 Concat_2531" [id=2771, type=Concat]; -"2772 Reshape_2532" [id=2772, type=Reshape]; -"2773 Constant_2533" [id=2773, type=Constant]; -"2774 Mul_2534" [id=2774, type=Mul]; -"2775 Constant_2535" [id=2775, type=Constant]; -"2776 Pow_2536" [id=2776, type=Pow]; -"2777 Constant_2537" [id=2777, type=Constant]; -"2778 Mul_2538" [id=2778, type=Mul]; -"2779 Add_2539" [id=2779, type=Add]; -"2780 Constant_2540" [id=2780, type=Constant]; -"2781 Mul_2541" [id=2781, type=Mul]; -"2782 Tanh_2542" [id=2782, type=Tanh]; -"2783 Constant_2543" 
[id=2783, type=Constant]; -"2784 Add_2544" [id=2784, type=Add]; -"2785 Mul_2545" [id=2785, type=Mul]; -"2786 QuantizeLinear_3070_1" [id=2786, type=QuantizeLinear]; -"2787 DequantizeLinear_3070_1" [id=2787, type=DequantizeLinear]; -"2788 Shape_2546" [id=2788, type=Shape]; -"2789 Constant_2547" [id=2789, type=Constant]; -"2790 Gather_2548" [id=2790, type=Gather]; -"2791 Shape_2549" [id=2791, type=Shape]; -"2792 Constant_2550" [id=2792, type=Constant]; -"2793 Gather_2551" [id=2793, type=Gather]; -"2794 Shape_2552" [id=2794, type=Shape]; -"2795 Constant_2553" [id=2795, type=Constant]; -"2796 Gather_2554" [id=2796, type=Gather]; -"2797 Unsqueeze_2555" [id=2797, type=Unsqueeze]; -"2798 Concat_2556" [id=2798, type=Concat]; -"2799 Reshape_2557" [id=2799, type=Reshape]; -"2800 QuantizeLinear_h.11.mlp.c_proj.weight_1" [id=2800, type=QuantizeLinear]; -"2801 DequantizeLinear_h.11.mlp.c_proj.weight_1" [id=2801, type=DequantizeLinear]; -"2802 Gemm_2558" [id=2802, type=Gemm]; -"2803 Unsqueeze_2559" [id=2803, type=Unsqueeze]; -"2804 Unsqueeze_2560" [id=2804, type=Unsqueeze]; -"2805 Concat_2561" [id=2805, type=Concat]; -"2806 Reshape_2562" [id=2806, type=Reshape]; -"2807 Add_2563" [id=2807, type=Add]; -"2808 ReduceMean_2564" [id=2808, type=ReduceMean]; -"2809 Sub_2565" [id=2809, type=Sub]; -"2810 Constant_2566" [id=2810, type=Constant]; -"2811 Pow_2567" [id=2811, type=Pow]; -"2812 ReduceMean_2568" [id=2812, type=ReduceMean]; -"2813 Constant_2569" [id=2813, type=Constant]; -"2814 Add_2570" [id=2814, type=Add]; -"2815 Sqrt_2571" [id=2815, type=Sqrt]; -"2816 Div_2572" [id=2816, type=Div]; -"2817 Mul_2573" [id=2817, type=Mul]; -"2818 Add_2574" [id=2818, type=Add]; -"2819 Unsqueeze_2575" [id=2819, type=Unsqueeze]; -"2820 Unsqueeze_2576" [id=2820, type=Unsqueeze]; -"2821 Unsqueeze_2577" [id=2821, type=Unsqueeze]; -"2822 Unsqueeze_2578" [id=2822, type=Unsqueeze]; -"2823 Concat_2579" [id=2823, type=Concat]; -"2824 Reshape_2580" [id=2824, type=Reshape]; -"2825 nncf_model_input_0" [id=2825, type=nncf_model_input]; -"2826 nncf_model_output_0" [id=2826, type=nncf_model_output]; -"2827 nncf_model_output_1" [id=2827, type=nncf_model_output]; -"2828 nncf_model_output_2" [id=2828, type=nncf_model_output]; -"2829 nncf_model_output_3" [id=2829, type=nncf_model_output]; -"2830 nncf_model_output_4" [id=2830, type=nncf_model_output]; -"2831 nncf_model_output_5" [id=2831, type=nncf_model_output]; -"2832 nncf_model_output_6" [id=2832, type=nncf_model_output]; -"2833 nncf_model_output_7" [id=2833, type=nncf_model_output]; -"2834 nncf_model_output_8" [id=2834, type=nncf_model_output]; -"2835 nncf_model_output_9" [id=2835, type=nncf_model_output]; -"2836 nncf_model_output_10" [id=2836, type=nncf_model_output]; -"2837 nncf_model_output_11" [id=2837, type=nncf_model_output]; -"2838 nncf_model_output_12" [id=2838, type=nncf_model_output]; +"9 Constant_nncf_9" [id=9, type=Constant]; +"10 Unsqueeze_9" [id=10, type=Unsqueeze]; +"11 Concat_10" [id=11, type=Concat]; +"12 Reshape_11" [id=12, type=Reshape]; +"13 Constant_nncf_13" [id=13, type=Constant]; +"14 Unsqueeze_12" [id=14, type=Unsqueeze]; +"15 Sub_13" [id=15, type=Sub]; +"16 Div_14" [id=16, type=Div]; +"17 ConstantOfShape_15" [id=17, type=ConstantOfShape]; +"18 NonZero_16" [id=18, type=NonZero]; +"19 Transpose_17" [id=19, type=Transpose]; +"20 Constant_nncf_20" [id=20, type=Constant]; +"21 Squeeze_18" [id=21, type=Squeeze]; +"22 Mul_19" [id=22, type=Mul]; +"23 Add_20" [id=23, type=Add]; +"24 Cast_21" [id=24, type=Cast]; +"25 Constant_nncf_25" [id=25, type=Constant]; +"26 
Unsqueeze_22" [id=26, type=Unsqueeze]; +"27 Constant_nncf_27" [id=27, type=Constant]; +"28 Unsqueeze_23" [id=28, type=Unsqueeze]; +"29 Concat_24" [id=29, type=Concat]; +"30 Reshape_25" [id=30, type=Reshape]; +"31 QuantizeLinear_wte.weight_1" [id=31, type=QuantizeLinear]; +"32 DequantizeLinear_wte.weight_1" [id=32, type=DequantizeLinear]; +"33 Gather_26" [id=33, type=Gather]; +"34 QuantizeLinear_wpe.weight_1" [id=34, type=QuantizeLinear]; +"35 DequantizeLinear_wpe.weight_1" [id=35, type=DequantizeLinear]; +"36 Gather_27" [id=36, type=Gather]; +"37 Add_28" [id=37, type=Add]; +"38 Shape_29" [id=38, type=Shape]; +"39 Constant_30" [id=39, type=Constant]; +"40 Gather_31" [id=40, type=Gather]; +"41 ReduceMean_32" [id=41, type=ReduceMean]; +"42 Sub_33" [id=42, type=Sub]; +"43 Constant_34" [id=43, type=Constant]; +"44 Pow_35" [id=44, type=Pow]; +"45 ReduceMean_36" [id=45, type=ReduceMean]; +"46 Constant_37" [id=46, type=Constant]; +"47 Add_38" [id=47, type=Add]; +"48 Sqrt_39" [id=48, type=Sqrt]; +"49 Div_40" [id=49, type=Div]; +"50 Mul_41" [id=50, type=Mul]; +"51 Add_42" [id=51, type=Add]; +"52 QuantizeLinear_211_1" [id=52, type=QuantizeLinear]; +"53 DequantizeLinear_211_1" [id=53, type=DequantizeLinear]; +"54 Shape_43" [id=54, type=Shape]; +"55 Constant_44" [id=55, type=Constant]; +"56 Gather_45" [id=56, type=Gather]; +"57 Shape_46" [id=57, type=Shape]; +"58 Constant_47" [id=58, type=Constant]; +"59 Gather_48" [id=59, type=Gather]; +"60 Shape_49" [id=60, type=Shape]; +"61 Constant_50" [id=61, type=Constant]; +"62 Gather_51" [id=62, type=Gather]; +"63 Constant_nncf_57" [id=63, type=Constant]; +"64 Unsqueeze_52" [id=64, type=Unsqueeze]; +"65 Concat_53" [id=65, type=Concat]; +"66 Reshape_54" [id=66, type=Reshape]; +"67 QuantizeLinear_h.0.attn.c_attn.weight_1" [id=67, type=QuantizeLinear]; +"68 DequantizeLinear_h.0.attn.c_attn.weight_1" [id=68, type=DequantizeLinear]; +"69 Gemm_55" [id=69, type=Gemm]; +"70 Constant_nncf_62" [id=70, type=Constant]; +"71 Unsqueeze_56" [id=71, type=Unsqueeze]; +"72 Constant_nncf_64" [id=72, type=Constant]; +"73 Unsqueeze_57" [id=73, type=Unsqueeze]; +"74 Concat_58" [id=74, type=Concat]; +"75 Reshape_59" [id=75, type=Reshape]; +"76 Constant_nncf_68" [id=76, type=Constant]; +"77 Split_60" [id=77, type=Split]; +"78 QuantizeLinear_query.1_1" [id=78, type=QuantizeLinear]; +"79 DequantizeLinear_query.1_1" [id=79, type=DequantizeLinear]; +"80 Shape_61" [id=80, type=Shape]; +"81 Constant_62" [id=81, type=Constant]; +"82 Gather_63" [id=82, type=Gather]; +"83 Shape_64" [id=83, type=Shape]; +"84 Constant_65" [id=84, type=Constant]; +"85 Gather_66" [id=85, type=Gather]; +"86 Shape_67" [id=86, type=Shape]; +"87 Constant_68" [id=87, type=Constant]; +"88 Gather_69" [id=88, type=Gather]; +"89 Constant_70" [id=89, type=Constant]; +"90 Div_71" [id=90, type=Div]; +"91 Cast_72" [id=91, type=Cast]; +"92 Cast_73" [id=92, type=Cast]; +"93 Constant_nncf_83" [id=93, type=Constant]; +"94 Unsqueeze_74" [id=94, type=Unsqueeze]; +"95 Constant_nncf_85" [id=95, type=Constant]; +"96 Unsqueeze_75" [id=96, type=Unsqueeze]; +"97 Constant_nncf_87" [id=97, type=Constant]; +"98 Unsqueeze_76" [id=98, type=Unsqueeze]; +"99 Concat_77" [id=99, type=Concat]; +"100 Reshape_78" [id=100, type=Reshape]; +"101 Transpose_79" [id=101, type=Transpose]; +"102 Shape_80" [id=102, type=Shape]; +"103 Constant_81" [id=103, type=Constant]; +"104 Gather_82" [id=104, type=Gather]; +"105 Shape_83" [id=105, type=Shape]; +"106 Constant_84" [id=106, type=Constant]; +"107 Gather_85" [id=107, type=Gather]; +"108 Shape_86" [id=108, 
type=Shape]; +"109 Constant_87" [id=109, type=Constant]; +"110 Gather_88" [id=110, type=Gather]; +"111 Constant_89" [id=111, type=Constant]; +"112 Div_90" [id=112, type=Div]; +"113 Cast_91" [id=113, type=Cast]; +"114 Cast_92" [id=114, type=Cast]; +"115 Constant_nncf_105" [id=115, type=Constant]; +"116 Unsqueeze_93" [id=116, type=Unsqueeze]; +"117 Constant_nncf_107" [id=117, type=Constant]; +"118 Unsqueeze_94" [id=118, type=Unsqueeze]; +"119 Constant_nncf_109" [id=119, type=Constant]; +"120 Unsqueeze_95" [id=120, type=Unsqueeze]; +"121 Concat_96" [id=121, type=Concat]; +"122 Reshape_97" [id=122, type=Reshape]; +"123 QuantizeLinear_276_1" [id=123, type=QuantizeLinear]; +"124 DequantizeLinear_276_1" [id=124, type=DequantizeLinear]; +"125 Transpose_98" [id=125, type=Transpose]; +"126 Shape_99" [id=126, type=Shape]; +"127 Constant_100" [id=127, type=Constant]; +"128 Gather_101" [id=128, type=Gather]; +"129 Shape_102" [id=129, type=Shape]; +"130 Constant_103" [id=130, type=Constant]; +"131 Gather_104" [id=131, type=Gather]; +"132 Shape_105" [id=132, type=Shape]; +"133 Constant_106" [id=133, type=Constant]; +"134 Gather_107" [id=134, type=Gather]; +"135 Constant_108" [id=135, type=Constant]; +"136 Div_109" [id=136, type=Div]; +"137 Cast_110" [id=137, type=Cast]; +"138 Cast_111" [id=138, type=Cast]; +"139 Constant_nncf_127" [id=139, type=Constant]; +"140 Unsqueeze_112" [id=140, type=Unsqueeze]; +"141 Constant_nncf_129" [id=141, type=Constant]; +"142 Unsqueeze_113" [id=142, type=Unsqueeze]; +"143 Constant_nncf_131" [id=143, type=Constant]; +"144 Unsqueeze_114" [id=144, type=Unsqueeze]; +"145 Concat_115" [id=145, type=Concat]; +"146 Reshape_116" [id=146, type=Reshape]; +"147 Transpose_117" [id=147, type=Transpose]; +"148 Transpose_118" [id=148, type=Transpose]; +"149 Constant_nncf_137" [id=149, type=Constant]; +"150 Unsqueeze_119" [id=150, type=Unsqueeze]; +"151 Constant_nncf_139" [id=151, type=Constant]; +"152 Unsqueeze_120" [id=152, type=Unsqueeze]; +"153 Concat_121" [id=153, type=Concat]; +"154 MatMul_122" [id=154, type=MatMul]; +"155 Constant_123" [id=155, type=Constant]; +"156 Div_124" [id=156, type=Div]; +"157 Shape_125" [id=157, type=Shape]; +"158 Constant_126" [id=158, type=Constant]; +"159 Gather_127" [id=159, type=Gather]; +"160 Shape_128" [id=160, type=Shape]; +"161 Constant_129" [id=161, type=Constant]; +"162 Gather_130" [id=162, type=Gather]; +"163 Sub_131" [id=163, type=Sub]; +"164 Constant_nncf_152" [id=164, type=Constant]; +"165 Unsqueeze_132" [id=165, type=Unsqueeze]; +"166 Constant_nncf_154" [id=166, type=Constant]; +"167 Unsqueeze_133" [id=167, type=Unsqueeze]; +"168 Constant_134" [id=168, type=Constant]; +"169 Slice_135" [id=169, type=Slice]; +"170 Constant_nncf_158" [id=170, type=Constant]; +"171 Unsqueeze_136" [id=171, type=Unsqueeze]; +"172 Constant_137" [id=172, type=Constant]; +"173 Slice_138" [id=173, type=Slice]; +"174 Mul_139" [id=174, type=Mul]; +"175 Constant_140" [id=175, type=Constant]; +"176 Sub_141" [id=176, type=Sub]; +"177 Constant_142" [id=177, type=Constant]; +"178 Mul_143" [id=178, type=Mul]; +"179 Sub_144" [id=179, type=Sub]; +"180 Shape_nncf_168" [id=180, type=Shape]; +"181 Flatten_nncf_169" [id=181, type=Flatten]; +"182 Softmax_145" [id=182, type=Softmax]; +"183 Reshape_nncf_171" [id=183, type=Reshape]; +"184 MatMul_146" [id=184, type=MatMul]; +"185 QuantizeLinear_333_1" [id=185, type=QuantizeLinear]; +"186 DequantizeLinear_333_1" [id=186, type=DequantizeLinear]; +"187 Transpose_147" [id=187, type=Transpose]; +"188 Shape_148" [id=188, type=Shape]; +"189 
Constant_149" [id=189, type=Constant]; +"190 Gather_150" [id=190, type=Gather]; +"191 Shape_151" [id=191, type=Shape]; +"192 Constant_152" [id=192, type=Constant]; +"193 Gather_153" [id=193, type=Gather]; +"194 Shape_154" [id=194, type=Shape]; +"195 Constant_155" [id=195, type=Constant]; +"196 Gather_156" [id=196, type=Gather]; +"197 Shape_157" [id=197, type=Shape]; +"198 Constant_158" [id=198, type=Constant]; +"199 Gather_159" [id=199, type=Gather]; +"200 Mul_160" [id=200, type=Mul]; +"201 Constant_nncf_187" [id=201, type=Constant]; +"202 Unsqueeze_161" [id=202, type=Unsqueeze]; +"203 Constant_nncf_189" [id=203, type=Constant]; +"204 Unsqueeze_162" [id=204, type=Unsqueeze]; +"205 Constant_nncf_191" [id=205, type=Constant]; +"206 Unsqueeze_163" [id=206, type=Unsqueeze]; +"207 Concat_164" [id=207, type=Concat]; +"208 Reshape_165" [id=208, type=Reshape]; +"209 Shape_166" [id=209, type=Shape]; +"210 Constant_167" [id=210, type=Constant]; +"211 Gather_168" [id=211, type=Gather]; +"212 Shape_169" [id=212, type=Shape]; +"213 Constant_170" [id=213, type=Constant]; +"214 Gather_171" [id=214, type=Gather]; +"215 Shape_172" [id=215, type=Shape]; +"216 Constant_173" [id=216, type=Constant]; +"217 Gather_174" [id=217, type=Gather]; +"218 Constant_nncf_204" [id=218, type=Constant]; +"219 Unsqueeze_175" [id=219, type=Unsqueeze]; +"220 Concat_176" [id=220, type=Concat]; +"221 Reshape_177" [id=221, type=Reshape]; +"222 QuantizeLinear_h.0.attn.c_proj.weight_1" [id=222, type=QuantizeLinear]; +"223 DequantizeLinear_h.0.attn.c_proj.weight_1" [id=223, type=DequantizeLinear]; +"224 Gemm_178" [id=224, type=Gemm]; +"225 Constant_nncf_209" [id=225, type=Constant]; +"226 Unsqueeze_179" [id=226, type=Unsqueeze]; +"227 Constant_nncf_211" [id=227, type=Constant]; +"228 Unsqueeze_180" [id=228, type=Unsqueeze]; +"229 Concat_181" [id=229, type=Concat]; +"230 Reshape_182" [id=230, type=Reshape]; +"231 Add_183" [id=231, type=Add]; +"232 ReduceMean_184" [id=232, type=ReduceMean]; +"233 Sub_185" [id=233, type=Sub]; +"234 Constant_186" [id=234, type=Constant]; +"235 Pow_187" [id=235, type=Pow]; +"236 ReduceMean_188" [id=236, type=ReduceMean]; +"237 Constant_189" [id=237, type=Constant]; +"238 Add_190" [id=238, type=Add]; +"239 Sqrt_191" [id=239, type=Sqrt]; +"240 Div_192" [id=240, type=Div]; +"241 Mul_193" [id=241, type=Mul]; +"242 Add_194" [id=242, type=Add]; +"243 QuantizeLinear_385_1" [id=243, type=QuantizeLinear]; +"244 DequantizeLinear_385_1" [id=244, type=DequantizeLinear]; +"245 Shape_195" [id=245, type=Shape]; +"246 Constant_196" [id=246, type=Constant]; +"247 Gather_197" [id=247, type=Gather]; +"248 Shape_198" [id=248, type=Shape]; +"249 Constant_199" [id=249, type=Constant]; +"250 Gather_200" [id=250, type=Gather]; +"251 Shape_201" [id=251, type=Shape]; +"252 Constant_202" [id=252, type=Constant]; +"253 Gather_203" [id=253, type=Gather]; +"254 Constant_nncf_236" [id=254, type=Constant]; +"255 Unsqueeze_204" [id=255, type=Unsqueeze]; +"256 Concat_205" [id=256, type=Concat]; +"257 Reshape_206" [id=257, type=Reshape]; +"258 QuantizeLinear_h.0.mlp.c_fc.weight_1" [id=258, type=QuantizeLinear]; +"259 DequantizeLinear_h.0.mlp.c_fc.weight_1" [id=259, type=DequantizeLinear]; +"260 Gemm_207" [id=260, type=Gemm]; +"261 Constant_nncf_241" [id=261, type=Constant]; +"262 Unsqueeze_208" [id=262, type=Unsqueeze]; +"263 Constant_nncf_243" [id=263, type=Constant]; +"264 Unsqueeze_209" [id=264, type=Unsqueeze]; +"265 Concat_210" [id=265, type=Concat]; +"266 Reshape_211" [id=266, type=Reshape]; +"267 Constant_212" [id=267, 
type=Constant]; +"268 Mul_213" [id=268, type=Mul]; +"269 Constant_214" [id=269, type=Constant]; +"270 Pow_215" [id=270, type=Pow]; +"271 Constant_216" [id=271, type=Constant]; +"272 Mul_217" [id=272, type=Mul]; +"273 Add_218" [id=273, type=Add]; +"274 Constant_219" [id=274, type=Constant]; +"275 Mul_220" [id=275, type=Mul]; +"276 Tanh_221" [id=276, type=Tanh]; +"277 Constant_222" [id=277, type=Constant]; +"278 Add_223" [id=278, type=Add]; +"279 Mul_224" [id=279, type=Mul]; +"280 QuantizeLinear_419_1" [id=280, type=QuantizeLinear]; +"281 DequantizeLinear_419_1" [id=281, type=DequantizeLinear]; +"282 Shape_225" [id=282, type=Shape]; +"283 Constant_226" [id=283, type=Constant]; +"284 Gather_227" [id=284, type=Gather]; +"285 Shape_228" [id=285, type=Shape]; +"286 Constant_229" [id=286, type=Constant]; +"287 Gather_230" [id=287, type=Gather]; +"288 Shape_231" [id=288, type=Shape]; +"289 Constant_232" [id=289, type=Constant]; +"290 Gather_233" [id=290, type=Gather]; +"291 Constant_nncf_269" [id=291, type=Constant]; +"292 Unsqueeze_234" [id=292, type=Unsqueeze]; +"293 Concat_235" [id=293, type=Concat]; +"294 Reshape_236" [id=294, type=Reshape]; +"295 QuantizeLinear_h.0.mlp.c_proj.weight_1" [id=295, type=QuantizeLinear]; +"296 DequantizeLinear_h.0.mlp.c_proj.weight_1" [id=296, type=DequantizeLinear]; +"297 Gemm_237" [id=297, type=Gemm]; +"298 Constant_nncf_274" [id=298, type=Constant]; +"299 Unsqueeze_238" [id=299, type=Unsqueeze]; +"300 Constant_nncf_276" [id=300, type=Constant]; +"301 Unsqueeze_239" [id=301, type=Unsqueeze]; +"302 Concat_240" [id=302, type=Concat]; +"303 Reshape_241" [id=303, type=Reshape]; +"304 Add_242" [id=304, type=Add]; +"305 ReduceMean_243" [id=305, type=ReduceMean]; +"306 Sub_244" [id=306, type=Sub]; +"307 Constant_245" [id=307, type=Constant]; +"308 Pow_246" [id=308, type=Pow]; +"309 ReduceMean_247" [id=309, type=ReduceMean]; +"310 Constant_248" [id=310, type=Constant]; +"311 Add_249" [id=311, type=Add]; +"312 Sqrt_250" [id=312, type=Sqrt]; +"313 Div_251" [id=313, type=Div]; +"314 Mul_252" [id=314, type=Mul]; +"315 Add_253" [id=315, type=Add]; +"316 QuantizeLinear_452_1" [id=316, type=QuantizeLinear]; +"317 DequantizeLinear_452_1" [id=317, type=DequantizeLinear]; +"318 Shape_254" [id=318, type=Shape]; +"319 Constant_255" [id=319, type=Constant]; +"320 Gather_256" [id=320, type=Gather]; +"321 Shape_257" [id=321, type=Shape]; +"322 Constant_258" [id=322, type=Constant]; +"323 Gather_259" [id=323, type=Gather]; +"324 Shape_260" [id=324, type=Shape]; +"325 Constant_261" [id=325, type=Constant]; +"326 Gather_262" [id=326, type=Gather]; +"327 Constant_nncf_301" [id=327, type=Constant]; +"328 Unsqueeze_263" [id=328, type=Unsqueeze]; +"329 Concat_264" [id=329, type=Concat]; +"330 Reshape_265" [id=330, type=Reshape]; +"331 QuantizeLinear_h.1.attn.c_attn.weight_1" [id=331, type=QuantizeLinear]; +"332 DequantizeLinear_h.1.attn.c_attn.weight_1" [id=332, type=DequantizeLinear]; +"333 Gemm_266" [id=333, type=Gemm]; +"334 Constant_nncf_306" [id=334, type=Constant]; +"335 Unsqueeze_267" [id=335, type=Unsqueeze]; +"336 Constant_nncf_308" [id=336, type=Constant]; +"337 Unsqueeze_268" [id=337, type=Unsqueeze]; +"338 Concat_269" [id=338, type=Concat]; +"339 Reshape_270" [id=339, type=Reshape]; +"340 Constant_nncf_312" [id=340, type=Constant]; +"341 Split_271" [id=341, type=Split]; +"342 QuantizeLinear_query.3_1" [id=342, type=QuantizeLinear]; +"343 DequantizeLinear_query.3_1" [id=343, type=DequantizeLinear]; +"344 Shape_272" [id=344, type=Shape]; +"345 Constant_273" [id=345, type=Constant]; 
+"346 Gather_274" [id=346, type=Gather]; +"347 Shape_275" [id=347, type=Shape]; +"348 Constant_276" [id=348, type=Constant]; +"349 Gather_277" [id=349, type=Gather]; +"350 Shape_278" [id=350, type=Shape]; +"351 Constant_279" [id=351, type=Constant]; +"352 Gather_280" [id=352, type=Gather]; +"353 Constant_281" [id=353, type=Constant]; +"354 Div_282" [id=354, type=Div]; +"355 Cast_283" [id=355, type=Cast]; +"356 Cast_284" [id=356, type=Cast]; +"357 Constant_nncf_327" [id=357, type=Constant]; +"358 Unsqueeze_285" [id=358, type=Unsqueeze]; +"359 Constant_nncf_329" [id=359, type=Constant]; +"360 Unsqueeze_286" [id=360, type=Unsqueeze]; +"361 Constant_nncf_331" [id=361, type=Constant]; +"362 Unsqueeze_287" [id=362, type=Unsqueeze]; +"363 Concat_288" [id=363, type=Concat]; +"364 Reshape_289" [id=364, type=Reshape]; +"365 Transpose_290" [id=365, type=Transpose]; +"366 Shape_291" [id=366, type=Shape]; +"367 Constant_292" [id=367, type=Constant]; +"368 Gather_293" [id=368, type=Gather]; +"369 Shape_294" [id=369, type=Shape]; +"370 Constant_295" [id=370, type=Constant]; +"371 Gather_296" [id=371, type=Gather]; +"372 Shape_297" [id=372, type=Shape]; +"373 Constant_298" [id=373, type=Constant]; +"374 Gather_299" [id=374, type=Gather]; +"375 Constant_300" [id=375, type=Constant]; +"376 Div_301" [id=376, type=Div]; +"377 Cast_302" [id=377, type=Cast]; +"378 Cast_303" [id=378, type=Cast]; +"379 Constant_nncf_349" [id=379, type=Constant]; +"380 Unsqueeze_304" [id=380, type=Unsqueeze]; +"381 Constant_nncf_351" [id=381, type=Constant]; +"382 Unsqueeze_305" [id=382, type=Unsqueeze]; +"383 Constant_nncf_353" [id=383, type=Constant]; +"384 Unsqueeze_306" [id=384, type=Unsqueeze]; +"385 Concat_307" [id=385, type=Concat]; +"386 Reshape_308" [id=386, type=Reshape]; +"387 QuantizeLinear_517_1" [id=387, type=QuantizeLinear]; +"388 DequantizeLinear_517_1" [id=388, type=DequantizeLinear]; +"389 Transpose_309" [id=389, type=Transpose]; +"390 Shape_310" [id=390, type=Shape]; +"391 Constant_311" [id=391, type=Constant]; +"392 Gather_312" [id=392, type=Gather]; +"393 Shape_313" [id=393, type=Shape]; +"394 Constant_314" [id=394, type=Constant]; +"395 Gather_315" [id=395, type=Gather]; +"396 Shape_316" [id=396, type=Shape]; +"397 Constant_317" [id=397, type=Constant]; +"398 Gather_318" [id=398, type=Gather]; +"399 Constant_319" [id=399, type=Constant]; +"400 Div_320" [id=400, type=Div]; +"401 Cast_321" [id=401, type=Cast]; +"402 Cast_322" [id=402, type=Cast]; +"403 Constant_nncf_371" [id=403, type=Constant]; +"404 Unsqueeze_323" [id=404, type=Unsqueeze]; +"405 Constant_nncf_373" [id=405, type=Constant]; +"406 Unsqueeze_324" [id=406, type=Unsqueeze]; +"407 Constant_nncf_375" [id=407, type=Constant]; +"408 Unsqueeze_325" [id=408, type=Unsqueeze]; +"409 Concat_326" [id=409, type=Concat]; +"410 Reshape_327" [id=410, type=Reshape]; +"411 Transpose_328" [id=411, type=Transpose]; +"412 Transpose_329" [id=412, type=Transpose]; +"413 Constant_nncf_381" [id=413, type=Constant]; +"414 Unsqueeze_330" [id=414, type=Unsqueeze]; +"415 Constant_nncf_383" [id=415, type=Constant]; +"416 Unsqueeze_331" [id=416, type=Unsqueeze]; +"417 Concat_332" [id=417, type=Concat]; +"418 MatMul_333" [id=418, type=MatMul]; +"419 Constant_334" [id=419, type=Constant]; +"420 Div_335" [id=420, type=Div]; +"421 Shape_336" [id=421, type=Shape]; +"422 Constant_337" [id=422, type=Constant]; +"423 Gather_338" [id=423, type=Gather]; +"424 Shape_339" [id=424, type=Shape]; +"425 Constant_340" [id=425, type=Constant]; +"426 Gather_341" [id=426, type=Gather]; +"427 
Sub_342" [id=427, type=Sub]; +"428 Constant_nncf_396" [id=428, type=Constant]; +"429 Unsqueeze_343" [id=429, type=Unsqueeze]; +"430 Constant_nncf_398" [id=430, type=Constant]; +"431 Unsqueeze_344" [id=431, type=Unsqueeze]; +"432 Constant_345" [id=432, type=Constant]; +"433 Slice_346" [id=433, type=Slice]; +"434 Constant_nncf_402" [id=434, type=Constant]; +"435 Unsqueeze_347" [id=435, type=Unsqueeze]; +"436 Constant_348" [id=436, type=Constant]; +"437 Slice_349" [id=437, type=Slice]; +"438 Mul_350" [id=438, type=Mul]; +"439 Constant_351" [id=439, type=Constant]; +"440 Sub_352" [id=440, type=Sub]; +"441 Constant_353" [id=441, type=Constant]; +"442 Mul_354" [id=442, type=Mul]; +"443 Sub_355" [id=443, type=Sub]; +"444 Shape_nncf_412" [id=444, type=Shape]; +"445 Flatten_nncf_413" [id=445, type=Flatten]; +"446 Softmax_356" [id=446, type=Softmax]; +"447 Reshape_nncf_415" [id=447, type=Reshape]; +"448 MatMul_357" [id=448, type=MatMul]; +"449 QuantizeLinear_574_1" [id=449, type=QuantizeLinear]; +"450 DequantizeLinear_574_1" [id=450, type=DequantizeLinear]; +"451 Transpose_358" [id=451, type=Transpose]; +"452 Shape_359" [id=452, type=Shape]; +"453 Constant_360" [id=453, type=Constant]; +"454 Gather_361" [id=454, type=Gather]; +"455 Shape_362" [id=455, type=Shape]; +"456 Constant_363" [id=456, type=Constant]; +"457 Gather_364" [id=457, type=Gather]; +"458 Shape_365" [id=458, type=Shape]; +"459 Constant_366" [id=459, type=Constant]; +"460 Gather_367" [id=460, type=Gather]; +"461 Shape_368" [id=461, type=Shape]; +"462 Constant_369" [id=462, type=Constant]; +"463 Gather_370" [id=463, type=Gather]; +"464 Mul_371" [id=464, type=Mul]; +"465 Constant_nncf_431" [id=465, type=Constant]; +"466 Unsqueeze_372" [id=466, type=Unsqueeze]; +"467 Constant_nncf_433" [id=467, type=Constant]; +"468 Unsqueeze_373" [id=468, type=Unsqueeze]; +"469 Constant_nncf_435" [id=469, type=Constant]; +"470 Unsqueeze_374" [id=470, type=Unsqueeze]; +"471 Concat_375" [id=471, type=Concat]; +"472 Reshape_376" [id=472, type=Reshape]; +"473 Shape_377" [id=473, type=Shape]; +"474 Constant_378" [id=474, type=Constant]; +"475 Gather_379" [id=475, type=Gather]; +"476 Shape_380" [id=476, type=Shape]; +"477 Constant_381" [id=477, type=Constant]; +"478 Gather_382" [id=478, type=Gather]; +"479 Shape_383" [id=479, type=Shape]; +"480 Constant_384" [id=480, type=Constant]; +"481 Gather_385" [id=481, type=Gather]; +"482 Constant_nncf_448" [id=482, type=Constant]; +"483 Unsqueeze_386" [id=483, type=Unsqueeze]; +"484 Concat_387" [id=484, type=Concat]; +"485 Reshape_388" [id=485, type=Reshape]; +"486 QuantizeLinear_h.1.attn.c_proj.weight_1" [id=486, type=QuantizeLinear]; +"487 DequantizeLinear_h.1.attn.c_proj.weight_1" [id=487, type=DequantizeLinear]; +"488 Gemm_389" [id=488, type=Gemm]; +"489 Constant_nncf_453" [id=489, type=Constant]; +"490 Unsqueeze_390" [id=490, type=Unsqueeze]; +"491 Constant_nncf_455" [id=491, type=Constant]; +"492 Unsqueeze_391" [id=492, type=Unsqueeze]; +"493 Concat_392" [id=493, type=Concat]; +"494 Reshape_393" [id=494, type=Reshape]; +"495 Add_394" [id=495, type=Add]; +"496 ReduceMean_395" [id=496, type=ReduceMean]; +"497 Sub_396" [id=497, type=Sub]; +"498 Constant_397" [id=498, type=Constant]; +"499 Pow_398" [id=499, type=Pow]; +"500 ReduceMean_399" [id=500, type=ReduceMean]; +"501 Constant_400" [id=501, type=Constant]; +"502 Add_401" [id=502, type=Add]; +"503 Sqrt_402" [id=503, type=Sqrt]; +"504 Div_403" [id=504, type=Div]; +"505 Mul_404" [id=505, type=Mul]; +"506 Add_405" [id=506, type=Add]; +"507 QuantizeLinear_626_1" 
[id=507, type=QuantizeLinear]; +"508 DequantizeLinear_626_1" [id=508, type=DequantizeLinear]; +"509 Shape_406" [id=509, type=Shape]; +"510 Constant_407" [id=510, type=Constant]; +"511 Gather_408" [id=511, type=Gather]; +"512 Shape_409" [id=512, type=Shape]; +"513 Constant_410" [id=513, type=Constant]; +"514 Gather_411" [id=514, type=Gather]; +"515 Shape_412" [id=515, type=Shape]; +"516 Constant_413" [id=516, type=Constant]; +"517 Gather_414" [id=517, type=Gather]; +"518 Constant_nncf_480" [id=518, type=Constant]; +"519 Unsqueeze_415" [id=519, type=Unsqueeze]; +"520 Concat_416" [id=520, type=Concat]; +"521 Reshape_417" [id=521, type=Reshape]; +"522 QuantizeLinear_h.1.mlp.c_fc.weight_1" [id=522, type=QuantizeLinear]; +"523 DequantizeLinear_h.1.mlp.c_fc.weight_1" [id=523, type=DequantizeLinear]; +"524 Gemm_418" [id=524, type=Gemm]; +"525 Constant_nncf_485" [id=525, type=Constant]; +"526 Unsqueeze_419" [id=526, type=Unsqueeze]; +"527 Constant_nncf_487" [id=527, type=Constant]; +"528 Unsqueeze_420" [id=528, type=Unsqueeze]; +"529 Concat_421" [id=529, type=Concat]; +"530 Reshape_422" [id=530, type=Reshape]; +"531 Constant_423" [id=531, type=Constant]; +"532 Mul_424" [id=532, type=Mul]; +"533 Constant_425" [id=533, type=Constant]; +"534 Pow_426" [id=534, type=Pow]; +"535 Constant_427" [id=535, type=Constant]; +"536 Mul_428" [id=536, type=Mul]; +"537 Add_429" [id=537, type=Add]; +"538 Constant_430" [id=538, type=Constant]; +"539 Mul_431" [id=539, type=Mul]; +"540 Tanh_432" [id=540, type=Tanh]; +"541 Constant_433" [id=541, type=Constant]; +"542 Add_434" [id=542, type=Add]; +"543 Mul_435" [id=543, type=Mul]; +"544 QuantizeLinear_660_1" [id=544, type=QuantizeLinear]; +"545 DequantizeLinear_660_1" [id=545, type=DequantizeLinear]; +"546 Shape_436" [id=546, type=Shape]; +"547 Constant_437" [id=547, type=Constant]; +"548 Gather_438" [id=548, type=Gather]; +"549 Shape_439" [id=549, type=Shape]; +"550 Constant_440" [id=550, type=Constant]; +"551 Gather_441" [id=551, type=Gather]; +"552 Shape_442" [id=552, type=Shape]; +"553 Constant_443" [id=553, type=Constant]; +"554 Gather_444" [id=554, type=Gather]; +"555 Constant_nncf_513" [id=555, type=Constant]; +"556 Unsqueeze_445" [id=556, type=Unsqueeze]; +"557 Concat_446" [id=557, type=Concat]; +"558 Reshape_447" [id=558, type=Reshape]; +"559 QuantizeLinear_h.1.mlp.c_proj.weight_1" [id=559, type=QuantizeLinear]; +"560 DequantizeLinear_h.1.mlp.c_proj.weight_1" [id=560, type=DequantizeLinear]; +"561 Gemm_448" [id=561, type=Gemm]; +"562 Constant_nncf_518" [id=562, type=Constant]; +"563 Unsqueeze_449" [id=563, type=Unsqueeze]; +"564 Constant_nncf_520" [id=564, type=Constant]; +"565 Unsqueeze_450" [id=565, type=Unsqueeze]; +"566 Concat_451" [id=566, type=Concat]; +"567 Reshape_452" [id=567, type=Reshape]; +"568 Add_453" [id=568, type=Add]; +"569 ReduceMean_454" [id=569, type=ReduceMean]; +"570 Sub_455" [id=570, type=Sub]; +"571 Constant_456" [id=571, type=Constant]; +"572 Pow_457" [id=572, type=Pow]; +"573 ReduceMean_458" [id=573, type=ReduceMean]; +"574 Constant_459" [id=574, type=Constant]; +"575 Add_460" [id=575, type=Add]; +"576 Sqrt_461" [id=576, type=Sqrt]; +"577 Div_462" [id=577, type=Div]; +"578 Mul_463" [id=578, type=Mul]; +"579 Add_464" [id=579, type=Add]; +"580 QuantizeLinear_693_1" [id=580, type=QuantizeLinear]; +"581 DequantizeLinear_693_1" [id=581, type=DequantizeLinear]; +"582 Shape_465" [id=582, type=Shape]; +"583 Constant_466" [id=583, type=Constant]; +"584 Gather_467" [id=584, type=Gather]; +"585 Shape_468" [id=585, type=Shape]; +"586 Constant_469" 
[id=586, type=Constant]; +"587 Gather_470" [id=587, type=Gather]; +"588 Shape_471" [id=588, type=Shape]; +"589 Constant_472" [id=589, type=Constant]; +"590 Gather_473" [id=590, type=Gather]; +"591 Constant_nncf_545" [id=591, type=Constant]; +"592 Unsqueeze_474" [id=592, type=Unsqueeze]; +"593 Concat_475" [id=593, type=Concat]; +"594 Reshape_476" [id=594, type=Reshape]; +"595 QuantizeLinear_h.2.attn.c_attn.weight_1" [id=595, type=QuantizeLinear]; +"596 DequantizeLinear_h.2.attn.c_attn.weight_1" [id=596, type=DequantizeLinear]; +"597 Gemm_477" [id=597, type=Gemm]; +"598 Constant_nncf_550" [id=598, type=Constant]; +"599 Unsqueeze_478" [id=599, type=Unsqueeze]; +"600 Constant_nncf_552" [id=600, type=Constant]; +"601 Unsqueeze_479" [id=601, type=Unsqueeze]; +"602 Concat_480" [id=602, type=Concat]; +"603 Reshape_481" [id=603, type=Reshape]; +"604 Constant_nncf_556" [id=604, type=Constant]; +"605 Split_482" [id=605, type=Split]; +"606 QuantizeLinear_query.5_1" [id=606, type=QuantizeLinear]; +"607 DequantizeLinear_query.5_1" [id=607, type=DequantizeLinear]; +"608 Shape_483" [id=608, type=Shape]; +"609 Constant_484" [id=609, type=Constant]; +"610 Gather_485" [id=610, type=Gather]; +"611 Shape_486" [id=611, type=Shape]; +"612 Constant_487" [id=612, type=Constant]; +"613 Gather_488" [id=613, type=Gather]; +"614 Shape_489" [id=614, type=Shape]; +"615 Constant_490" [id=615, type=Constant]; +"616 Gather_491" [id=616, type=Gather]; +"617 Constant_492" [id=617, type=Constant]; +"618 Div_493" [id=618, type=Div]; +"619 Cast_494" [id=619, type=Cast]; +"620 Cast_495" [id=620, type=Cast]; +"621 Constant_nncf_571" [id=621, type=Constant]; +"622 Unsqueeze_496" [id=622, type=Unsqueeze]; +"623 Constant_nncf_573" [id=623, type=Constant]; +"624 Unsqueeze_497" [id=624, type=Unsqueeze]; +"625 Constant_nncf_575" [id=625, type=Constant]; +"626 Unsqueeze_498" [id=626, type=Unsqueeze]; +"627 Concat_499" [id=627, type=Concat]; +"628 Reshape_500" [id=628, type=Reshape]; +"629 Transpose_501" [id=629, type=Transpose]; +"630 Shape_502" [id=630, type=Shape]; +"631 Constant_503" [id=631, type=Constant]; +"632 Gather_504" [id=632, type=Gather]; +"633 Shape_505" [id=633, type=Shape]; +"634 Constant_506" [id=634, type=Constant]; +"635 Gather_507" [id=635, type=Gather]; +"636 Shape_508" [id=636, type=Shape]; +"637 Constant_509" [id=637, type=Constant]; +"638 Gather_510" [id=638, type=Gather]; +"639 Constant_511" [id=639, type=Constant]; +"640 Div_512" [id=640, type=Div]; +"641 Cast_513" [id=641, type=Cast]; +"642 Cast_514" [id=642, type=Cast]; +"643 Constant_nncf_593" [id=643, type=Constant]; +"644 Unsqueeze_515" [id=644, type=Unsqueeze]; +"645 Constant_nncf_595" [id=645, type=Constant]; +"646 Unsqueeze_516" [id=646, type=Unsqueeze]; +"647 Constant_nncf_597" [id=647, type=Constant]; +"648 Unsqueeze_517" [id=648, type=Unsqueeze]; +"649 Concat_518" [id=649, type=Concat]; +"650 Reshape_519" [id=650, type=Reshape]; +"651 QuantizeLinear_758_1" [id=651, type=QuantizeLinear]; +"652 DequantizeLinear_758_1" [id=652, type=DequantizeLinear]; +"653 Transpose_520" [id=653, type=Transpose]; +"654 Shape_521" [id=654, type=Shape]; +"655 Constant_522" [id=655, type=Constant]; +"656 Gather_523" [id=656, type=Gather]; +"657 Shape_524" [id=657, type=Shape]; +"658 Constant_525" [id=658, type=Constant]; +"659 Gather_526" [id=659, type=Gather]; +"660 Shape_527" [id=660, type=Shape]; +"661 Constant_528" [id=661, type=Constant]; +"662 Gather_529" [id=662, type=Gather]; +"663 Constant_530" [id=663, type=Constant]; +"664 Div_531" [id=664, type=Div]; +"665 
Cast_532" [id=665, type=Cast]; +"666 Cast_533" [id=666, type=Cast]; +"667 Constant_nncf_615" [id=667, type=Constant]; +"668 Unsqueeze_534" [id=668, type=Unsqueeze]; +"669 Constant_nncf_617" [id=669, type=Constant]; +"670 Unsqueeze_535" [id=670, type=Unsqueeze]; +"671 Constant_nncf_619" [id=671, type=Constant]; +"672 Unsqueeze_536" [id=672, type=Unsqueeze]; +"673 Concat_537" [id=673, type=Concat]; +"674 Reshape_538" [id=674, type=Reshape]; +"675 Transpose_539" [id=675, type=Transpose]; +"676 Transpose_540" [id=676, type=Transpose]; +"677 Constant_nncf_625" [id=677, type=Constant]; +"678 Unsqueeze_541" [id=678, type=Unsqueeze]; +"679 Constant_nncf_627" [id=679, type=Constant]; +"680 Unsqueeze_542" [id=680, type=Unsqueeze]; +"681 Concat_543" [id=681, type=Concat]; +"682 MatMul_544" [id=682, type=MatMul]; +"683 Constant_545" [id=683, type=Constant]; +"684 Div_546" [id=684, type=Div]; +"685 Shape_547" [id=685, type=Shape]; +"686 Constant_548" [id=686, type=Constant]; +"687 Gather_549" [id=687, type=Gather]; +"688 Shape_550" [id=688, type=Shape]; +"689 Constant_551" [id=689, type=Constant]; +"690 Gather_552" [id=690, type=Gather]; +"691 Sub_553" [id=691, type=Sub]; +"692 Constant_nncf_640" [id=692, type=Constant]; +"693 Unsqueeze_554" [id=693, type=Unsqueeze]; +"694 Constant_nncf_642" [id=694, type=Constant]; +"695 Unsqueeze_555" [id=695, type=Unsqueeze]; +"696 Constant_556" [id=696, type=Constant]; +"697 Slice_557" [id=697, type=Slice]; +"698 Constant_nncf_646" [id=698, type=Constant]; +"699 Unsqueeze_558" [id=699, type=Unsqueeze]; +"700 Constant_559" [id=700, type=Constant]; +"701 Slice_560" [id=701, type=Slice]; +"702 Mul_561" [id=702, type=Mul]; +"703 Constant_562" [id=703, type=Constant]; +"704 Sub_563" [id=704, type=Sub]; +"705 Constant_564" [id=705, type=Constant]; +"706 Mul_565" [id=706, type=Mul]; +"707 Sub_566" [id=707, type=Sub]; +"708 Shape_nncf_656" [id=708, type=Shape]; +"709 Flatten_nncf_657" [id=709, type=Flatten]; +"710 Softmax_567" [id=710, type=Softmax]; +"711 Reshape_nncf_659" [id=711, type=Reshape]; +"712 MatMul_568" [id=712, type=MatMul]; +"713 QuantizeLinear_815_1" [id=713, type=QuantizeLinear]; +"714 DequantizeLinear_815_1" [id=714, type=DequantizeLinear]; +"715 Transpose_569" [id=715, type=Transpose]; +"716 Shape_570" [id=716, type=Shape]; +"717 Constant_571" [id=717, type=Constant]; +"718 Gather_572" [id=718, type=Gather]; +"719 Shape_573" [id=719, type=Shape]; +"720 Constant_574" [id=720, type=Constant]; +"721 Gather_575" [id=721, type=Gather]; +"722 Shape_576" [id=722, type=Shape]; +"723 Constant_577" [id=723, type=Constant]; +"724 Gather_578" [id=724, type=Gather]; +"725 Shape_579" [id=725, type=Shape]; +"726 Constant_580" [id=726, type=Constant]; +"727 Gather_581" [id=727, type=Gather]; +"728 Mul_582" [id=728, type=Mul]; +"729 Constant_nncf_675" [id=729, type=Constant]; +"730 Unsqueeze_583" [id=730, type=Unsqueeze]; +"731 Constant_nncf_677" [id=731, type=Constant]; +"732 Unsqueeze_584" [id=732, type=Unsqueeze]; +"733 Constant_nncf_679" [id=733, type=Constant]; +"734 Unsqueeze_585" [id=734, type=Unsqueeze]; +"735 Concat_586" [id=735, type=Concat]; +"736 Reshape_587" [id=736, type=Reshape]; +"737 Shape_588" [id=737, type=Shape]; +"738 Constant_589" [id=738, type=Constant]; +"739 Gather_590" [id=739, type=Gather]; +"740 Shape_591" [id=740, type=Shape]; +"741 Constant_592" [id=741, type=Constant]; +"742 Gather_593" [id=742, type=Gather]; +"743 Shape_594" [id=743, type=Shape]; +"744 Constant_595" [id=744, type=Constant]; +"745 Gather_596" [id=745, type=Gather]; +"746 
Constant_nncf_692" [id=746, type=Constant]; +"747 Unsqueeze_597" [id=747, type=Unsqueeze]; +"748 Concat_598" [id=748, type=Concat]; +"749 Reshape_599" [id=749, type=Reshape]; +"750 QuantizeLinear_h.2.attn.c_proj.weight_1" [id=750, type=QuantizeLinear]; +"751 DequantizeLinear_h.2.attn.c_proj.weight_1" [id=751, type=DequantizeLinear]; +"752 Gemm_600" [id=752, type=Gemm]; +"753 Constant_nncf_697" [id=753, type=Constant]; +"754 Unsqueeze_601" [id=754, type=Unsqueeze]; +"755 Constant_nncf_699" [id=755, type=Constant]; +"756 Unsqueeze_602" [id=756, type=Unsqueeze]; +"757 Concat_603" [id=757, type=Concat]; +"758 Reshape_604" [id=758, type=Reshape]; +"759 Add_605" [id=759, type=Add]; +"760 ReduceMean_606" [id=760, type=ReduceMean]; +"761 Sub_607" [id=761, type=Sub]; +"762 Constant_608" [id=762, type=Constant]; +"763 Pow_609" [id=763, type=Pow]; +"764 ReduceMean_610" [id=764, type=ReduceMean]; +"765 Constant_611" [id=765, type=Constant]; +"766 Add_612" [id=766, type=Add]; +"767 Sqrt_613" [id=767, type=Sqrt]; +"768 Div_614" [id=768, type=Div]; +"769 Mul_615" [id=769, type=Mul]; +"770 Add_616" [id=770, type=Add]; +"771 QuantizeLinear_867_1" [id=771, type=QuantizeLinear]; +"772 DequantizeLinear_867_1" [id=772, type=DequantizeLinear]; +"773 Shape_617" [id=773, type=Shape]; +"774 Constant_618" [id=774, type=Constant]; +"775 Gather_619" [id=775, type=Gather]; +"776 Shape_620" [id=776, type=Shape]; +"777 Constant_621" [id=777, type=Constant]; +"778 Gather_622" [id=778, type=Gather]; +"779 Shape_623" [id=779, type=Shape]; +"780 Constant_624" [id=780, type=Constant]; +"781 Gather_625" [id=781, type=Gather]; +"782 Constant_nncf_724" [id=782, type=Constant]; +"783 Unsqueeze_626" [id=783, type=Unsqueeze]; +"784 Concat_627" [id=784, type=Concat]; +"785 Reshape_628" [id=785, type=Reshape]; +"786 QuantizeLinear_h.2.mlp.c_fc.weight_1" [id=786, type=QuantizeLinear]; +"787 DequantizeLinear_h.2.mlp.c_fc.weight_1" [id=787, type=DequantizeLinear]; +"788 Gemm_629" [id=788, type=Gemm]; +"789 Constant_nncf_729" [id=789, type=Constant]; +"790 Unsqueeze_630" [id=790, type=Unsqueeze]; +"791 Constant_nncf_731" [id=791, type=Constant]; +"792 Unsqueeze_631" [id=792, type=Unsqueeze]; +"793 Concat_632" [id=793, type=Concat]; +"794 Reshape_633" [id=794, type=Reshape]; +"795 Constant_634" [id=795, type=Constant]; +"796 Mul_635" [id=796, type=Mul]; +"797 Constant_636" [id=797, type=Constant]; +"798 Pow_637" [id=798, type=Pow]; +"799 Constant_638" [id=799, type=Constant]; +"800 Mul_639" [id=800, type=Mul]; +"801 Add_640" [id=801, type=Add]; +"802 Constant_641" [id=802, type=Constant]; +"803 Mul_642" [id=803, type=Mul]; +"804 Tanh_643" [id=804, type=Tanh]; +"805 Constant_644" [id=805, type=Constant]; +"806 Add_645" [id=806, type=Add]; +"807 Mul_646" [id=807, type=Mul]; +"808 QuantizeLinear_901_1" [id=808, type=QuantizeLinear]; +"809 DequantizeLinear_901_1" [id=809, type=DequantizeLinear]; +"810 Shape_647" [id=810, type=Shape]; +"811 Constant_648" [id=811, type=Constant]; +"812 Gather_649" [id=812, type=Gather]; +"813 Shape_650" [id=813, type=Shape]; +"814 Constant_651" [id=814, type=Constant]; +"815 Gather_652" [id=815, type=Gather]; +"816 Shape_653" [id=816, type=Shape]; +"817 Constant_654" [id=817, type=Constant]; +"818 Gather_655" [id=818, type=Gather]; +"819 Constant_nncf_757" [id=819, type=Constant]; +"820 Unsqueeze_656" [id=820, type=Unsqueeze]; +"821 Concat_657" [id=821, type=Concat]; +"822 Reshape_658" [id=822, type=Reshape]; +"823 QuantizeLinear_h.2.mlp.c_proj.weight_1" [id=823, type=QuantizeLinear]; +"824 
DequantizeLinear_h.2.mlp.c_proj.weight_1" [id=824, type=DequantizeLinear]; +"825 Gemm_659" [id=825, type=Gemm]; +"826 Constant_nncf_762" [id=826, type=Constant]; +"827 Unsqueeze_660" [id=827, type=Unsqueeze]; +"828 Constant_nncf_764" [id=828, type=Constant]; +"829 Unsqueeze_661" [id=829, type=Unsqueeze]; +"830 Concat_662" [id=830, type=Concat]; +"831 Reshape_663" [id=831, type=Reshape]; +"832 Add_664" [id=832, type=Add]; +"833 ReduceMean_665" [id=833, type=ReduceMean]; +"834 Sub_666" [id=834, type=Sub]; +"835 Constant_667" [id=835, type=Constant]; +"836 Pow_668" [id=836, type=Pow]; +"837 ReduceMean_669" [id=837, type=ReduceMean]; +"838 Constant_670" [id=838, type=Constant]; +"839 Add_671" [id=839, type=Add]; +"840 Sqrt_672" [id=840, type=Sqrt]; +"841 Div_673" [id=841, type=Div]; +"842 Mul_674" [id=842, type=Mul]; +"843 Add_675" [id=843, type=Add]; +"844 QuantizeLinear_934_1" [id=844, type=QuantizeLinear]; +"845 DequantizeLinear_934_1" [id=845, type=DequantizeLinear]; +"846 Shape_676" [id=846, type=Shape]; +"847 Constant_677" [id=847, type=Constant]; +"848 Gather_678" [id=848, type=Gather]; +"849 Shape_679" [id=849, type=Shape]; +"850 Constant_680" [id=850, type=Constant]; +"851 Gather_681" [id=851, type=Gather]; +"852 Shape_682" [id=852, type=Shape]; +"853 Constant_683" [id=853, type=Constant]; +"854 Gather_684" [id=854, type=Gather]; +"855 Constant_nncf_789" [id=855, type=Constant]; +"856 Unsqueeze_685" [id=856, type=Unsqueeze]; +"857 Concat_686" [id=857, type=Concat]; +"858 Reshape_687" [id=858, type=Reshape]; +"859 QuantizeLinear_h.3.attn.c_attn.weight_1" [id=859, type=QuantizeLinear]; +"860 DequantizeLinear_h.3.attn.c_attn.weight_1" [id=860, type=DequantizeLinear]; +"861 Gemm_688" [id=861, type=Gemm]; +"862 Constant_nncf_794" [id=862, type=Constant]; +"863 Unsqueeze_689" [id=863, type=Unsqueeze]; +"864 Constant_nncf_796" [id=864, type=Constant]; +"865 Unsqueeze_690" [id=865, type=Unsqueeze]; +"866 Concat_691" [id=866, type=Concat]; +"867 Reshape_692" [id=867, type=Reshape]; +"868 Constant_nncf_800" [id=868, type=Constant]; +"869 Split_693" [id=869, type=Split]; +"870 QuantizeLinear_query.7_1" [id=870, type=QuantizeLinear]; +"871 DequantizeLinear_query.7_1" [id=871, type=DequantizeLinear]; +"872 Shape_694" [id=872, type=Shape]; +"873 Constant_695" [id=873, type=Constant]; +"874 Gather_696" [id=874, type=Gather]; +"875 Shape_697" [id=875, type=Shape]; +"876 Constant_698" [id=876, type=Constant]; +"877 Gather_699" [id=877, type=Gather]; +"878 Shape_700" [id=878, type=Shape]; +"879 Constant_701" [id=879, type=Constant]; +"880 Gather_702" [id=880, type=Gather]; +"881 Constant_703" [id=881, type=Constant]; +"882 Div_704" [id=882, type=Div]; +"883 Cast_705" [id=883, type=Cast]; +"884 Cast_706" [id=884, type=Cast]; +"885 Constant_nncf_815" [id=885, type=Constant]; +"886 Unsqueeze_707" [id=886, type=Unsqueeze]; +"887 Constant_nncf_817" [id=887, type=Constant]; +"888 Unsqueeze_708" [id=888, type=Unsqueeze]; +"889 Constant_nncf_819" [id=889, type=Constant]; +"890 Unsqueeze_709" [id=890, type=Unsqueeze]; +"891 Concat_710" [id=891, type=Concat]; +"892 Reshape_711" [id=892, type=Reshape]; +"893 Transpose_712" [id=893, type=Transpose]; +"894 Shape_713" [id=894, type=Shape]; +"895 Constant_714" [id=895, type=Constant]; +"896 Gather_715" [id=896, type=Gather]; +"897 Shape_716" [id=897, type=Shape]; +"898 Constant_717" [id=898, type=Constant]; +"899 Gather_718" [id=899, type=Gather]; +"900 Shape_719" [id=900, type=Shape]; +"901 Constant_720" [id=901, type=Constant]; +"902 Gather_721" [id=902, 
type=Gather]; +"903 Constant_722" [id=903, type=Constant]; +"904 Div_723" [id=904, type=Div]; +"905 Cast_724" [id=905, type=Cast]; +"906 Cast_725" [id=906, type=Cast]; +"907 Constant_nncf_837" [id=907, type=Constant]; +"908 Unsqueeze_726" [id=908, type=Unsqueeze]; +"909 Constant_nncf_839" [id=909, type=Constant]; +"910 Unsqueeze_727" [id=910, type=Unsqueeze]; +"911 Constant_nncf_841" [id=911, type=Constant]; +"912 Unsqueeze_728" [id=912, type=Unsqueeze]; +"913 Concat_729" [id=913, type=Concat]; +"914 Reshape_730" [id=914, type=Reshape]; +"915 QuantizeLinear_999_1" [id=915, type=QuantizeLinear]; +"916 DequantizeLinear_999_1" [id=916, type=DequantizeLinear]; +"917 Transpose_731" [id=917, type=Transpose]; +"918 Shape_732" [id=918, type=Shape]; +"919 Constant_733" [id=919, type=Constant]; +"920 Gather_734" [id=920, type=Gather]; +"921 Shape_735" [id=921, type=Shape]; +"922 Constant_736" [id=922, type=Constant]; +"923 Gather_737" [id=923, type=Gather]; +"924 Shape_738" [id=924, type=Shape]; +"925 Constant_739" [id=925, type=Constant]; +"926 Gather_740" [id=926, type=Gather]; +"927 Constant_741" [id=927, type=Constant]; +"928 Div_742" [id=928, type=Div]; +"929 Cast_743" [id=929, type=Cast]; +"930 Cast_744" [id=930, type=Cast]; +"931 Constant_nncf_859" [id=931, type=Constant]; +"932 Unsqueeze_745" [id=932, type=Unsqueeze]; +"933 Constant_nncf_861" [id=933, type=Constant]; +"934 Unsqueeze_746" [id=934, type=Unsqueeze]; +"935 Constant_nncf_863" [id=935, type=Constant]; +"936 Unsqueeze_747" [id=936, type=Unsqueeze]; +"937 Concat_748" [id=937, type=Concat]; +"938 Reshape_749" [id=938, type=Reshape]; +"939 Transpose_750" [id=939, type=Transpose]; +"940 Transpose_751" [id=940, type=Transpose]; +"941 Constant_nncf_869" [id=941, type=Constant]; +"942 Unsqueeze_752" [id=942, type=Unsqueeze]; +"943 Constant_nncf_871" [id=943, type=Constant]; +"944 Unsqueeze_753" [id=944, type=Unsqueeze]; +"945 Concat_754" [id=945, type=Concat]; +"946 MatMul_755" [id=946, type=MatMul]; +"947 Constant_756" [id=947, type=Constant]; +"948 Div_757" [id=948, type=Div]; +"949 Shape_758" [id=949, type=Shape]; +"950 Constant_759" [id=950, type=Constant]; +"951 Gather_760" [id=951, type=Gather]; +"952 Shape_761" [id=952, type=Shape]; +"953 Constant_762" [id=953, type=Constant]; +"954 Gather_763" [id=954, type=Gather]; +"955 Sub_764" [id=955, type=Sub]; +"956 Constant_nncf_884" [id=956, type=Constant]; +"957 Unsqueeze_765" [id=957, type=Unsqueeze]; +"958 Constant_nncf_886" [id=958, type=Constant]; +"959 Unsqueeze_766" [id=959, type=Unsqueeze]; +"960 Constant_767" [id=960, type=Constant]; +"961 Slice_768" [id=961, type=Slice]; +"962 Constant_nncf_890" [id=962, type=Constant]; +"963 Unsqueeze_769" [id=963, type=Unsqueeze]; +"964 Constant_770" [id=964, type=Constant]; +"965 Slice_771" [id=965, type=Slice]; +"966 Mul_772" [id=966, type=Mul]; +"967 Constant_773" [id=967, type=Constant]; +"968 Sub_774" [id=968, type=Sub]; +"969 Constant_775" [id=969, type=Constant]; +"970 Mul_776" [id=970, type=Mul]; +"971 Sub_777" [id=971, type=Sub]; +"972 Shape_nncf_900" [id=972, type=Shape]; +"973 Flatten_nncf_901" [id=973, type=Flatten]; +"974 Softmax_778" [id=974, type=Softmax]; +"975 Reshape_nncf_903" [id=975, type=Reshape]; +"976 MatMul_779" [id=976, type=MatMul]; +"977 QuantizeLinear_1056_1" [id=977, type=QuantizeLinear]; +"978 DequantizeLinear_1056_1" [id=978, type=DequantizeLinear]; +"979 Transpose_780" [id=979, type=Transpose]; +"980 Shape_781" [id=980, type=Shape]; +"981 Constant_782" [id=981, type=Constant]; +"982 Gather_783" [id=982, 
type=Gather]; +"983 Shape_784" [id=983, type=Shape]; +"984 Constant_785" [id=984, type=Constant]; +"985 Gather_786" [id=985, type=Gather]; +"986 Shape_787" [id=986, type=Shape]; +"987 Constant_788" [id=987, type=Constant]; +"988 Gather_789" [id=988, type=Gather]; +"989 Shape_790" [id=989, type=Shape]; +"990 Constant_791" [id=990, type=Constant]; +"991 Gather_792" [id=991, type=Gather]; +"992 Mul_793" [id=992, type=Mul]; +"993 Constant_nncf_919" [id=993, type=Constant]; +"994 Unsqueeze_794" [id=994, type=Unsqueeze]; +"995 Constant_nncf_921" [id=995, type=Constant]; +"996 Unsqueeze_795" [id=996, type=Unsqueeze]; +"997 Constant_nncf_923" [id=997, type=Constant]; +"998 Unsqueeze_796" [id=998, type=Unsqueeze]; +"999 Concat_797" [id=999, type=Concat]; +"1000 Reshape_798" [id=1000, type=Reshape]; +"1001 Shape_799" [id=1001, type=Shape]; +"1002 Constant_800" [id=1002, type=Constant]; +"1003 Gather_801" [id=1003, type=Gather]; +"1004 Shape_802" [id=1004, type=Shape]; +"1005 Constant_803" [id=1005, type=Constant]; +"1006 Gather_804" [id=1006, type=Gather]; +"1007 Shape_805" [id=1007, type=Shape]; +"1008 Constant_806" [id=1008, type=Constant]; +"1009 Gather_807" [id=1009, type=Gather]; +"1010 Constant_nncf_936" [id=1010, type=Constant]; +"1011 Unsqueeze_808" [id=1011, type=Unsqueeze]; +"1012 Concat_809" [id=1012, type=Concat]; +"1013 Reshape_810" [id=1013, type=Reshape]; +"1014 QuantizeLinear_h.3.attn.c_proj.weight_1" [id=1014, type=QuantizeLinear]; +"1015 DequantizeLinear_h.3.attn.c_proj.weight_1" [id=1015, type=DequantizeLinear]; +"1016 Gemm_811" [id=1016, type=Gemm]; +"1017 Constant_nncf_941" [id=1017, type=Constant]; +"1018 Unsqueeze_812" [id=1018, type=Unsqueeze]; +"1019 Constant_nncf_943" [id=1019, type=Constant]; +"1020 Unsqueeze_813" [id=1020, type=Unsqueeze]; +"1021 Concat_814" [id=1021, type=Concat]; +"1022 Reshape_815" [id=1022, type=Reshape]; +"1023 Add_816" [id=1023, type=Add]; +"1024 ReduceMean_817" [id=1024, type=ReduceMean]; +"1025 Sub_818" [id=1025, type=Sub]; +"1026 Constant_819" [id=1026, type=Constant]; +"1027 Pow_820" [id=1027, type=Pow]; +"1028 ReduceMean_821" [id=1028, type=ReduceMean]; +"1029 Constant_822" [id=1029, type=Constant]; +"1030 Add_823" [id=1030, type=Add]; +"1031 Sqrt_824" [id=1031, type=Sqrt]; +"1032 Div_825" [id=1032, type=Div]; +"1033 Mul_826" [id=1033, type=Mul]; +"1034 Add_827" [id=1034, type=Add]; +"1035 QuantizeLinear_1108_1" [id=1035, type=QuantizeLinear]; +"1036 DequantizeLinear_1108_1" [id=1036, type=DequantizeLinear]; +"1037 Shape_828" [id=1037, type=Shape]; +"1038 Constant_829" [id=1038, type=Constant]; +"1039 Gather_830" [id=1039, type=Gather]; +"1040 Shape_831" [id=1040, type=Shape]; +"1041 Constant_832" [id=1041, type=Constant]; +"1042 Gather_833" [id=1042, type=Gather]; +"1043 Shape_834" [id=1043, type=Shape]; +"1044 Constant_835" [id=1044, type=Constant]; +"1045 Gather_836" [id=1045, type=Gather]; +"1046 Constant_nncf_968" [id=1046, type=Constant]; +"1047 Unsqueeze_837" [id=1047, type=Unsqueeze]; +"1048 Concat_838" [id=1048, type=Concat]; +"1049 Reshape_839" [id=1049, type=Reshape]; +"1050 QuantizeLinear_h.3.mlp.c_fc.weight_1" [id=1050, type=QuantizeLinear]; +"1051 DequantizeLinear_h.3.mlp.c_fc.weight_1" [id=1051, type=DequantizeLinear]; +"1052 Gemm_840" [id=1052, type=Gemm]; +"1053 Constant_nncf_973" [id=1053, type=Constant]; +"1054 Unsqueeze_841" [id=1054, type=Unsqueeze]; +"1055 Constant_nncf_975" [id=1055, type=Constant]; +"1056 Unsqueeze_842" [id=1056, type=Unsqueeze]; +"1057 Concat_843" [id=1057, type=Concat]; +"1058 Reshape_844" [id=1058, 
type=Reshape]; +"1059 Constant_845" [id=1059, type=Constant]; +"1060 Mul_846" [id=1060, type=Mul]; +"1061 Constant_847" [id=1061, type=Constant]; +"1062 Pow_848" [id=1062, type=Pow]; +"1063 Constant_849" [id=1063, type=Constant]; +"1064 Mul_850" [id=1064, type=Mul]; +"1065 Add_851" [id=1065, type=Add]; +"1066 Constant_852" [id=1066, type=Constant]; +"1067 Mul_853" [id=1067, type=Mul]; +"1068 Tanh_854" [id=1068, type=Tanh]; +"1069 Constant_855" [id=1069, type=Constant]; +"1070 Add_856" [id=1070, type=Add]; +"1071 Mul_857" [id=1071, type=Mul]; +"1072 QuantizeLinear_1142_1" [id=1072, type=QuantizeLinear]; +"1073 DequantizeLinear_1142_1" [id=1073, type=DequantizeLinear]; +"1074 Shape_858" [id=1074, type=Shape]; +"1075 Constant_859" [id=1075, type=Constant]; +"1076 Gather_860" [id=1076, type=Gather]; +"1077 Shape_861" [id=1077, type=Shape]; +"1078 Constant_862" [id=1078, type=Constant]; +"1079 Gather_863" [id=1079, type=Gather]; +"1080 Shape_864" [id=1080, type=Shape]; +"1081 Constant_865" [id=1081, type=Constant]; +"1082 Gather_866" [id=1082, type=Gather]; +"1083 Constant_nncf_1001" [id=1083, type=Constant]; +"1084 Unsqueeze_867" [id=1084, type=Unsqueeze]; +"1085 Concat_868" [id=1085, type=Concat]; +"1086 Reshape_869" [id=1086, type=Reshape]; +"1087 QuantizeLinear_h.3.mlp.c_proj.weight_1" [id=1087, type=QuantizeLinear]; +"1088 DequantizeLinear_h.3.mlp.c_proj.weight_1" [id=1088, type=DequantizeLinear]; +"1089 Gemm_870" [id=1089, type=Gemm]; +"1090 Constant_nncf_1006" [id=1090, type=Constant]; +"1091 Unsqueeze_871" [id=1091, type=Unsqueeze]; +"1092 Constant_nncf_1008" [id=1092, type=Constant]; +"1093 Unsqueeze_872" [id=1093, type=Unsqueeze]; +"1094 Concat_873" [id=1094, type=Concat]; +"1095 Reshape_874" [id=1095, type=Reshape]; +"1096 Add_875" [id=1096, type=Add]; +"1097 ReduceMean_876" [id=1097, type=ReduceMean]; +"1098 Sub_877" [id=1098, type=Sub]; +"1099 Constant_878" [id=1099, type=Constant]; +"1100 Pow_879" [id=1100, type=Pow]; +"1101 ReduceMean_880" [id=1101, type=ReduceMean]; +"1102 Constant_881" [id=1102, type=Constant]; +"1103 Add_882" [id=1103, type=Add]; +"1104 Sqrt_883" [id=1104, type=Sqrt]; +"1105 Div_884" [id=1105, type=Div]; +"1106 Mul_885" [id=1106, type=Mul]; +"1107 Add_886" [id=1107, type=Add]; +"1108 QuantizeLinear_1175_1" [id=1108, type=QuantizeLinear]; +"1109 DequantizeLinear_1175_1" [id=1109, type=DequantizeLinear]; +"1110 Shape_887" [id=1110, type=Shape]; +"1111 Constant_888" [id=1111, type=Constant]; +"1112 Gather_889" [id=1112, type=Gather]; +"1113 Shape_890" [id=1113, type=Shape]; +"1114 Constant_891" [id=1114, type=Constant]; +"1115 Gather_892" [id=1115, type=Gather]; +"1116 Shape_893" [id=1116, type=Shape]; +"1117 Constant_894" [id=1117, type=Constant]; +"1118 Gather_895" [id=1118, type=Gather]; +"1119 Constant_nncf_1033" [id=1119, type=Constant]; +"1120 Unsqueeze_896" [id=1120, type=Unsqueeze]; +"1121 Concat_897" [id=1121, type=Concat]; +"1122 Reshape_898" [id=1122, type=Reshape]; +"1123 QuantizeLinear_h.4.attn.c_attn.weight_1" [id=1123, type=QuantizeLinear]; +"1124 DequantizeLinear_h.4.attn.c_attn.weight_1" [id=1124, type=DequantizeLinear]; +"1125 Gemm_899" [id=1125, type=Gemm]; +"1126 Constant_nncf_1038" [id=1126, type=Constant]; +"1127 Unsqueeze_900" [id=1127, type=Unsqueeze]; +"1128 Constant_nncf_1040" [id=1128, type=Constant]; +"1129 Unsqueeze_901" [id=1129, type=Unsqueeze]; +"1130 Concat_902" [id=1130, type=Concat]; +"1131 Reshape_903" [id=1131, type=Reshape]; +"1132 Constant_nncf_1044" [id=1132, type=Constant]; +"1133 Split_904" [id=1133, type=Split]; +"1134 
QuantizeLinear_query.9_1" [id=1134, type=QuantizeLinear]; +"1135 DequantizeLinear_query.9_1" [id=1135, type=DequantizeLinear]; +"1136 Shape_905" [id=1136, type=Shape]; +"1137 Constant_906" [id=1137, type=Constant]; +"1138 Gather_907" [id=1138, type=Gather]; +"1139 Shape_908" [id=1139, type=Shape]; +"1140 Constant_909" [id=1140, type=Constant]; +"1141 Gather_910" [id=1141, type=Gather]; +"1142 Shape_911" [id=1142, type=Shape]; +"1143 Constant_912" [id=1143, type=Constant]; +"1144 Gather_913" [id=1144, type=Gather]; +"1145 Constant_914" [id=1145, type=Constant]; +"1146 Div_915" [id=1146, type=Div]; +"1147 Cast_916" [id=1147, type=Cast]; +"1148 Cast_917" [id=1148, type=Cast]; +"1149 Constant_nncf_1059" [id=1149, type=Constant]; +"1150 Unsqueeze_918" [id=1150, type=Unsqueeze]; +"1151 Constant_nncf_1061" [id=1151, type=Constant]; +"1152 Unsqueeze_919" [id=1152, type=Unsqueeze]; +"1153 Constant_nncf_1063" [id=1153, type=Constant]; +"1154 Unsqueeze_920" [id=1154, type=Unsqueeze]; +"1155 Concat_921" [id=1155, type=Concat]; +"1156 Reshape_922" [id=1156, type=Reshape]; +"1157 Transpose_923" [id=1157, type=Transpose]; +"1158 Shape_924" [id=1158, type=Shape]; +"1159 Constant_925" [id=1159, type=Constant]; +"1160 Gather_926" [id=1160, type=Gather]; +"1161 Shape_927" [id=1161, type=Shape]; +"1162 Constant_928" [id=1162, type=Constant]; +"1163 Gather_929" [id=1163, type=Gather]; +"1164 Shape_930" [id=1164, type=Shape]; +"1165 Constant_931" [id=1165, type=Constant]; +"1166 Gather_932" [id=1166, type=Gather]; +"1167 Constant_933" [id=1167, type=Constant]; +"1168 Div_934" [id=1168, type=Div]; +"1169 Cast_935" [id=1169, type=Cast]; +"1170 Cast_936" [id=1170, type=Cast]; +"1171 Constant_nncf_1081" [id=1171, type=Constant]; +"1172 Unsqueeze_937" [id=1172, type=Unsqueeze]; +"1173 Constant_nncf_1083" [id=1173, type=Constant]; +"1174 Unsqueeze_938" [id=1174, type=Unsqueeze]; +"1175 Constant_nncf_1085" [id=1175, type=Constant]; +"1176 Unsqueeze_939" [id=1176, type=Unsqueeze]; +"1177 Concat_940" [id=1177, type=Concat]; +"1178 Reshape_941" [id=1178, type=Reshape]; +"1179 QuantizeLinear_1240_1" [id=1179, type=QuantizeLinear]; +"1180 DequantizeLinear_1240_1" [id=1180, type=DequantizeLinear]; +"1181 Transpose_942" [id=1181, type=Transpose]; +"1182 Shape_943" [id=1182, type=Shape]; +"1183 Constant_944" [id=1183, type=Constant]; +"1184 Gather_945" [id=1184, type=Gather]; +"1185 Shape_946" [id=1185, type=Shape]; +"1186 Constant_947" [id=1186, type=Constant]; +"1187 Gather_948" [id=1187, type=Gather]; +"1188 Shape_949" [id=1188, type=Shape]; +"1189 Constant_950" [id=1189, type=Constant]; +"1190 Gather_951" [id=1190, type=Gather]; +"1191 Constant_952" [id=1191, type=Constant]; +"1192 Div_953" [id=1192, type=Div]; +"1193 Cast_954" [id=1193, type=Cast]; +"1194 Cast_955" [id=1194, type=Cast]; +"1195 Constant_nncf_1103" [id=1195, type=Constant]; +"1196 Unsqueeze_956" [id=1196, type=Unsqueeze]; +"1197 Constant_nncf_1105" [id=1197, type=Constant]; +"1198 Unsqueeze_957" [id=1198, type=Unsqueeze]; +"1199 Constant_nncf_1107" [id=1199, type=Constant]; +"1200 Unsqueeze_958" [id=1200, type=Unsqueeze]; +"1201 Concat_959" [id=1201, type=Concat]; +"1202 Reshape_960" [id=1202, type=Reshape]; +"1203 Transpose_961" [id=1203, type=Transpose]; +"1204 Transpose_962" [id=1204, type=Transpose]; +"1205 Constant_nncf_1113" [id=1205, type=Constant]; +"1206 Unsqueeze_963" [id=1206, type=Unsqueeze]; +"1207 Constant_nncf_1115" [id=1207, type=Constant]; +"1208 Unsqueeze_964" [id=1208, type=Unsqueeze]; +"1209 Concat_965" [id=1209, type=Concat]; +"1210 
MatMul_966" [id=1210, type=MatMul]; +"1211 Constant_967" [id=1211, type=Constant]; +"1212 Div_968" [id=1212, type=Div]; +"1213 Shape_969" [id=1213, type=Shape]; +"1214 Constant_970" [id=1214, type=Constant]; +"1215 Gather_971" [id=1215, type=Gather]; +"1216 Shape_972" [id=1216, type=Shape]; +"1217 Constant_973" [id=1217, type=Constant]; +"1218 Gather_974" [id=1218, type=Gather]; +"1219 Sub_975" [id=1219, type=Sub]; +"1220 Constant_nncf_1128" [id=1220, type=Constant]; +"1221 Unsqueeze_976" [id=1221, type=Unsqueeze]; +"1222 Constant_nncf_1130" [id=1222, type=Constant]; +"1223 Unsqueeze_977" [id=1223, type=Unsqueeze]; +"1224 Constant_978" [id=1224, type=Constant]; +"1225 Slice_979" [id=1225, type=Slice]; +"1226 Constant_nncf_1134" [id=1226, type=Constant]; +"1227 Unsqueeze_980" [id=1227, type=Unsqueeze]; +"1228 Constant_981" [id=1228, type=Constant]; +"1229 Slice_982" [id=1229, type=Slice]; +"1230 Mul_983" [id=1230, type=Mul]; +"1231 Constant_984" [id=1231, type=Constant]; +"1232 Sub_985" [id=1232, type=Sub]; +"1233 Constant_986" [id=1233, type=Constant]; +"1234 Mul_987" [id=1234, type=Mul]; +"1235 Sub_988" [id=1235, type=Sub]; +"1236 Shape_nncf_1144" [id=1236, type=Shape]; +"1237 Flatten_nncf_1145" [id=1237, type=Flatten]; +"1238 Softmax_989" [id=1238, type=Softmax]; +"1239 Reshape_nncf_1147" [id=1239, type=Reshape]; +"1240 MatMul_990" [id=1240, type=MatMul]; +"1241 QuantizeLinear_1297_1" [id=1241, type=QuantizeLinear]; +"1242 DequantizeLinear_1297_1" [id=1242, type=DequantizeLinear]; +"1243 Transpose_991" [id=1243, type=Transpose]; +"1244 Shape_992" [id=1244, type=Shape]; +"1245 Constant_993" [id=1245, type=Constant]; +"1246 Gather_994" [id=1246, type=Gather]; +"1247 Shape_995" [id=1247, type=Shape]; +"1248 Constant_996" [id=1248, type=Constant]; +"1249 Gather_997" [id=1249, type=Gather]; +"1250 Shape_998" [id=1250, type=Shape]; +"1251 Constant_999" [id=1251, type=Constant]; +"1252 Gather_1000" [id=1252, type=Gather]; +"1253 Shape_1001" [id=1253, type=Shape]; +"1254 Constant_1002" [id=1254, type=Constant]; +"1255 Gather_1003" [id=1255, type=Gather]; +"1256 Mul_1004" [id=1256, type=Mul]; +"1257 Constant_nncf_1163" [id=1257, type=Constant]; +"1258 Unsqueeze_1005" [id=1258, type=Unsqueeze]; +"1259 Constant_nncf_1165" [id=1259, type=Constant]; +"1260 Unsqueeze_1006" [id=1260, type=Unsqueeze]; +"1261 Constant_nncf_1167" [id=1261, type=Constant]; +"1262 Unsqueeze_1007" [id=1262, type=Unsqueeze]; +"1263 Concat_1008" [id=1263, type=Concat]; +"1264 Reshape_1009" [id=1264, type=Reshape]; +"1265 Shape_1010" [id=1265, type=Shape]; +"1266 Constant_1011" [id=1266, type=Constant]; +"1267 Gather_1012" [id=1267, type=Gather]; +"1268 Shape_1013" [id=1268, type=Shape]; +"1269 Constant_1014" [id=1269, type=Constant]; +"1270 Gather_1015" [id=1270, type=Gather]; +"1271 Shape_1016" [id=1271, type=Shape]; +"1272 Constant_1017" [id=1272, type=Constant]; +"1273 Gather_1018" [id=1273, type=Gather]; +"1274 Constant_nncf_1180" [id=1274, type=Constant]; +"1275 Unsqueeze_1019" [id=1275, type=Unsqueeze]; +"1276 Concat_1020" [id=1276, type=Concat]; +"1277 Reshape_1021" [id=1277, type=Reshape]; +"1278 QuantizeLinear_h.4.attn.c_proj.weight_1" [id=1278, type=QuantizeLinear]; +"1279 DequantizeLinear_h.4.attn.c_proj.weight_1" [id=1279, type=DequantizeLinear]; +"1280 Gemm_1022" [id=1280, type=Gemm]; +"1281 Constant_nncf_1185" [id=1281, type=Constant]; +"1282 Unsqueeze_1023" [id=1282, type=Unsqueeze]; +"1283 Constant_nncf_1187" [id=1283, type=Constant]; +"1284 Unsqueeze_1024" [id=1284, type=Unsqueeze]; +"1285 Concat_1025" 
[id=1285, type=Concat]; +"1286 Reshape_1026" [id=1286, type=Reshape]; +"1287 Add_1027" [id=1287, type=Add]; +"1288 ReduceMean_1028" [id=1288, type=ReduceMean]; +"1289 Sub_1029" [id=1289, type=Sub]; +"1290 Constant_1030" [id=1290, type=Constant]; +"1291 Pow_1031" [id=1291, type=Pow]; +"1292 ReduceMean_1032" [id=1292, type=ReduceMean]; +"1293 Constant_1033" [id=1293, type=Constant]; +"1294 Add_1034" [id=1294, type=Add]; +"1295 Sqrt_1035" [id=1295, type=Sqrt]; +"1296 Div_1036" [id=1296, type=Div]; +"1297 Mul_1037" [id=1297, type=Mul]; +"1298 Add_1038" [id=1298, type=Add]; +"1299 QuantizeLinear_1349_1" [id=1299, type=QuantizeLinear]; +"1300 DequantizeLinear_1349_1" [id=1300, type=DequantizeLinear]; +"1301 Shape_1039" [id=1301, type=Shape]; +"1302 Constant_1040" [id=1302, type=Constant]; +"1303 Gather_1041" [id=1303, type=Gather]; +"1304 Shape_1042" [id=1304, type=Shape]; +"1305 Constant_1043" [id=1305, type=Constant]; +"1306 Gather_1044" [id=1306, type=Gather]; +"1307 Shape_1045" [id=1307, type=Shape]; +"1308 Constant_1046" [id=1308, type=Constant]; +"1309 Gather_1047" [id=1309, type=Gather]; +"1310 Constant_nncf_1212" [id=1310, type=Constant]; +"1311 Unsqueeze_1048" [id=1311, type=Unsqueeze]; +"1312 Concat_1049" [id=1312, type=Concat]; +"1313 Reshape_1050" [id=1313, type=Reshape]; +"1314 QuantizeLinear_h.4.mlp.c_fc.weight_1" [id=1314, type=QuantizeLinear]; +"1315 DequantizeLinear_h.4.mlp.c_fc.weight_1" [id=1315, type=DequantizeLinear]; +"1316 Gemm_1051" [id=1316, type=Gemm]; +"1317 Constant_nncf_1217" [id=1317, type=Constant]; +"1318 Unsqueeze_1052" [id=1318, type=Unsqueeze]; +"1319 Constant_nncf_1219" [id=1319, type=Constant]; +"1320 Unsqueeze_1053" [id=1320, type=Unsqueeze]; +"1321 Concat_1054" [id=1321, type=Concat]; +"1322 Reshape_1055" [id=1322, type=Reshape]; +"1323 Constant_1056" [id=1323, type=Constant]; +"1324 Mul_1057" [id=1324, type=Mul]; +"1325 Constant_1058" [id=1325, type=Constant]; +"1326 Pow_1059" [id=1326, type=Pow]; +"1327 Constant_1060" [id=1327, type=Constant]; +"1328 Mul_1061" [id=1328, type=Mul]; +"1329 Add_1062" [id=1329, type=Add]; +"1330 Constant_1063" [id=1330, type=Constant]; +"1331 Mul_1064" [id=1331, type=Mul]; +"1332 Tanh_1065" [id=1332, type=Tanh]; +"1333 Constant_1066" [id=1333, type=Constant]; +"1334 Add_1067" [id=1334, type=Add]; +"1335 Mul_1068" [id=1335, type=Mul]; +"1336 QuantizeLinear_1383_1" [id=1336, type=QuantizeLinear]; +"1337 DequantizeLinear_1383_1" [id=1337, type=DequantizeLinear]; +"1338 Shape_1069" [id=1338, type=Shape]; +"1339 Constant_1070" [id=1339, type=Constant]; +"1340 Gather_1071" [id=1340, type=Gather]; +"1341 Shape_1072" [id=1341, type=Shape]; +"1342 Constant_1073" [id=1342, type=Constant]; +"1343 Gather_1074" [id=1343, type=Gather]; +"1344 Shape_1075" [id=1344, type=Shape]; +"1345 Constant_1076" [id=1345, type=Constant]; +"1346 Gather_1077" [id=1346, type=Gather]; +"1347 Constant_nncf_1245" [id=1347, type=Constant]; +"1348 Unsqueeze_1078" [id=1348, type=Unsqueeze]; +"1349 Concat_1079" [id=1349, type=Concat]; +"1350 Reshape_1080" [id=1350, type=Reshape]; +"1351 QuantizeLinear_h.4.mlp.c_proj.weight_1" [id=1351, type=QuantizeLinear]; +"1352 DequantizeLinear_h.4.mlp.c_proj.weight_1" [id=1352, type=DequantizeLinear]; +"1353 Gemm_1081" [id=1353, type=Gemm]; +"1354 Constant_nncf_1250" [id=1354, type=Constant]; +"1355 Unsqueeze_1082" [id=1355, type=Unsqueeze]; +"1356 Constant_nncf_1252" [id=1356, type=Constant]; +"1357 Unsqueeze_1083" [id=1357, type=Unsqueeze]; +"1358 Concat_1084" [id=1358, type=Concat]; +"1359 Reshape_1085" [id=1359, 
type=Reshape]; +"1360 Add_1086" [id=1360, type=Add]; +"1361 ReduceMean_1087" [id=1361, type=ReduceMean]; +"1362 Sub_1088" [id=1362, type=Sub]; +"1363 Constant_1089" [id=1363, type=Constant]; +"1364 Pow_1090" [id=1364, type=Pow]; +"1365 ReduceMean_1091" [id=1365, type=ReduceMean]; +"1366 Constant_1092" [id=1366, type=Constant]; +"1367 Add_1093" [id=1367, type=Add]; +"1368 Sqrt_1094" [id=1368, type=Sqrt]; +"1369 Div_1095" [id=1369, type=Div]; +"1370 Mul_1096" [id=1370, type=Mul]; +"1371 Add_1097" [id=1371, type=Add]; +"1372 QuantizeLinear_1416_1" [id=1372, type=QuantizeLinear]; +"1373 DequantizeLinear_1416_1" [id=1373, type=DequantizeLinear]; +"1374 Shape_1098" [id=1374, type=Shape]; +"1375 Constant_1099" [id=1375, type=Constant]; +"1376 Gather_1100" [id=1376, type=Gather]; +"1377 Shape_1101" [id=1377, type=Shape]; +"1378 Constant_1102" [id=1378, type=Constant]; +"1379 Gather_1103" [id=1379, type=Gather]; +"1380 Shape_1104" [id=1380, type=Shape]; +"1381 Constant_1105" [id=1381, type=Constant]; +"1382 Gather_1106" [id=1382, type=Gather]; +"1383 Constant_nncf_1277" [id=1383, type=Constant]; +"1384 Unsqueeze_1107" [id=1384, type=Unsqueeze]; +"1385 Concat_1108" [id=1385, type=Concat]; +"1386 Reshape_1109" [id=1386, type=Reshape]; +"1387 QuantizeLinear_h.5.attn.c_attn.weight_1" [id=1387, type=QuantizeLinear]; +"1388 DequantizeLinear_h.5.attn.c_attn.weight_1" [id=1388, type=DequantizeLinear]; +"1389 Gemm_1110" [id=1389, type=Gemm]; +"1390 Constant_nncf_1282" [id=1390, type=Constant]; +"1391 Unsqueeze_1111" [id=1391, type=Unsqueeze]; +"1392 Constant_nncf_1284" [id=1392, type=Constant]; +"1393 Unsqueeze_1112" [id=1393, type=Unsqueeze]; +"1394 Concat_1113" [id=1394, type=Concat]; +"1395 Reshape_1114" [id=1395, type=Reshape]; +"1396 Constant_nncf_1288" [id=1396, type=Constant]; +"1397 Split_1115" [id=1397, type=Split]; +"1398 QuantizeLinear_query.11_1" [id=1398, type=QuantizeLinear]; +"1399 DequantizeLinear_query.11_1" [id=1399, type=DequantizeLinear]; +"1400 Shape_1116" [id=1400, type=Shape]; +"1401 Constant_1117" [id=1401, type=Constant]; +"1402 Gather_1118" [id=1402, type=Gather]; +"1403 Shape_1119" [id=1403, type=Shape]; +"1404 Constant_1120" [id=1404, type=Constant]; +"1405 Gather_1121" [id=1405, type=Gather]; +"1406 Shape_1122" [id=1406, type=Shape]; +"1407 Constant_1123" [id=1407, type=Constant]; +"1408 Gather_1124" [id=1408, type=Gather]; +"1409 Constant_1125" [id=1409, type=Constant]; +"1410 Div_1126" [id=1410, type=Div]; +"1411 Cast_1127" [id=1411, type=Cast]; +"1412 Cast_1128" [id=1412, type=Cast]; +"1413 Constant_nncf_1303" [id=1413, type=Constant]; +"1414 Unsqueeze_1129" [id=1414, type=Unsqueeze]; +"1415 Constant_nncf_1305" [id=1415, type=Constant]; +"1416 Unsqueeze_1130" [id=1416, type=Unsqueeze]; +"1417 Constant_nncf_1307" [id=1417, type=Constant]; +"1418 Unsqueeze_1131" [id=1418, type=Unsqueeze]; +"1419 Concat_1132" [id=1419, type=Concat]; +"1420 Reshape_1133" [id=1420, type=Reshape]; +"1421 Transpose_1134" [id=1421, type=Transpose]; +"1422 Shape_1135" [id=1422, type=Shape]; +"1423 Constant_1136" [id=1423, type=Constant]; +"1424 Gather_1137" [id=1424, type=Gather]; +"1425 Shape_1138" [id=1425, type=Shape]; +"1426 Constant_1139" [id=1426, type=Constant]; +"1427 Gather_1140" [id=1427, type=Gather]; +"1428 Shape_1141" [id=1428, type=Shape]; +"1429 Constant_1142" [id=1429, type=Constant]; +"1430 Gather_1143" [id=1430, type=Gather]; +"1431 Constant_1144" [id=1431, type=Constant]; +"1432 Div_1145" [id=1432, type=Div]; +"1433 Cast_1146" [id=1433, type=Cast]; +"1434 Cast_1147" [id=1434, 
type=Cast]; +"1435 Constant_nncf_1325" [id=1435, type=Constant]; +"1436 Unsqueeze_1148" [id=1436, type=Unsqueeze]; +"1437 Constant_nncf_1327" [id=1437, type=Constant]; +"1438 Unsqueeze_1149" [id=1438, type=Unsqueeze]; +"1439 Constant_nncf_1329" [id=1439, type=Constant]; +"1440 Unsqueeze_1150" [id=1440, type=Unsqueeze]; +"1441 Concat_1151" [id=1441, type=Concat]; +"1442 Reshape_1152" [id=1442, type=Reshape]; +"1443 QuantizeLinear_1481_1" [id=1443, type=QuantizeLinear]; +"1444 DequantizeLinear_1481_1" [id=1444, type=DequantizeLinear]; +"1445 Transpose_1153" [id=1445, type=Transpose]; +"1446 Shape_1154" [id=1446, type=Shape]; +"1447 Constant_1155" [id=1447, type=Constant]; +"1448 Gather_1156" [id=1448, type=Gather]; +"1449 Shape_1157" [id=1449, type=Shape]; +"1450 Constant_1158" [id=1450, type=Constant]; +"1451 Gather_1159" [id=1451, type=Gather]; +"1452 Shape_1160" [id=1452, type=Shape]; +"1453 Constant_1161" [id=1453, type=Constant]; +"1454 Gather_1162" [id=1454, type=Gather]; +"1455 Constant_1163" [id=1455, type=Constant]; +"1456 Div_1164" [id=1456, type=Div]; +"1457 Cast_1165" [id=1457, type=Cast]; +"1458 Cast_1166" [id=1458, type=Cast]; +"1459 Constant_nncf_1347" [id=1459, type=Constant]; +"1460 Unsqueeze_1167" [id=1460, type=Unsqueeze]; +"1461 Constant_nncf_1349" [id=1461, type=Constant]; +"1462 Unsqueeze_1168" [id=1462, type=Unsqueeze]; +"1463 Constant_nncf_1351" [id=1463, type=Constant]; +"1464 Unsqueeze_1169" [id=1464, type=Unsqueeze]; +"1465 Concat_1170" [id=1465, type=Concat]; +"1466 Reshape_1171" [id=1466, type=Reshape]; +"1467 Transpose_1172" [id=1467, type=Transpose]; +"1468 Transpose_1173" [id=1468, type=Transpose]; +"1469 Constant_nncf_1357" [id=1469, type=Constant]; +"1470 Unsqueeze_1174" [id=1470, type=Unsqueeze]; +"1471 Constant_nncf_1359" [id=1471, type=Constant]; +"1472 Unsqueeze_1175" [id=1472, type=Unsqueeze]; +"1473 Concat_1176" [id=1473, type=Concat]; +"1474 MatMul_1177" [id=1474, type=MatMul]; +"1475 Constant_1178" [id=1475, type=Constant]; +"1476 Div_1179" [id=1476, type=Div]; +"1477 Shape_1180" [id=1477, type=Shape]; +"1478 Constant_1181" [id=1478, type=Constant]; +"1479 Gather_1182" [id=1479, type=Gather]; +"1480 Shape_1183" [id=1480, type=Shape]; +"1481 Constant_1184" [id=1481, type=Constant]; +"1482 Gather_1185" [id=1482, type=Gather]; +"1483 Sub_1186" [id=1483, type=Sub]; +"1484 Constant_nncf_1372" [id=1484, type=Constant]; +"1485 Unsqueeze_1187" [id=1485, type=Unsqueeze]; +"1486 Constant_nncf_1374" [id=1486, type=Constant]; +"1487 Unsqueeze_1188" [id=1487, type=Unsqueeze]; +"1488 Constant_1189" [id=1488, type=Constant]; +"1489 Slice_1190" [id=1489, type=Slice]; +"1490 Constant_nncf_1378" [id=1490, type=Constant]; +"1491 Unsqueeze_1191" [id=1491, type=Unsqueeze]; +"1492 Constant_1192" [id=1492, type=Constant]; +"1493 Slice_1193" [id=1493, type=Slice]; +"1494 Mul_1194" [id=1494, type=Mul]; +"1495 Constant_1195" [id=1495, type=Constant]; +"1496 Sub_1196" [id=1496, type=Sub]; +"1497 Constant_1197" [id=1497, type=Constant]; +"1498 Mul_1198" [id=1498, type=Mul]; +"1499 Sub_1199" [id=1499, type=Sub]; +"1500 Shape_nncf_1388" [id=1500, type=Shape]; +"1501 Flatten_nncf_1389" [id=1501, type=Flatten]; +"1502 Softmax_1200" [id=1502, type=Softmax]; +"1503 Reshape_nncf_1391" [id=1503, type=Reshape]; +"1504 MatMul_1201" [id=1504, type=MatMul]; +"1505 QuantizeLinear_1538_1" [id=1505, type=QuantizeLinear]; +"1506 DequantizeLinear_1538_1" [id=1506, type=DequantizeLinear]; +"1507 Transpose_1202" [id=1507, type=Transpose]; +"1508 Shape_1203" [id=1508, type=Shape]; +"1509 
Constant_1204" [id=1509, type=Constant]; +"1510 Gather_1205" [id=1510, type=Gather]; +"1511 Shape_1206" [id=1511, type=Shape]; +"1512 Constant_1207" [id=1512, type=Constant]; +"1513 Gather_1208" [id=1513, type=Gather]; +"1514 Shape_1209" [id=1514, type=Shape]; +"1515 Constant_1210" [id=1515, type=Constant]; +"1516 Gather_1211" [id=1516, type=Gather]; +"1517 Shape_1212" [id=1517, type=Shape]; +"1518 Constant_1213" [id=1518, type=Constant]; +"1519 Gather_1214" [id=1519, type=Gather]; +"1520 Mul_1215" [id=1520, type=Mul]; +"1521 Constant_nncf_1407" [id=1521, type=Constant]; +"1522 Unsqueeze_1216" [id=1522, type=Unsqueeze]; +"1523 Constant_nncf_1409" [id=1523, type=Constant]; +"1524 Unsqueeze_1217" [id=1524, type=Unsqueeze]; +"1525 Constant_nncf_1411" [id=1525, type=Constant]; +"1526 Unsqueeze_1218" [id=1526, type=Unsqueeze]; +"1527 Concat_1219" [id=1527, type=Concat]; +"1528 Reshape_1220" [id=1528, type=Reshape]; +"1529 Shape_1221" [id=1529, type=Shape]; +"1530 Constant_1222" [id=1530, type=Constant]; +"1531 Gather_1223" [id=1531, type=Gather]; +"1532 Shape_1224" [id=1532, type=Shape]; +"1533 Constant_1225" [id=1533, type=Constant]; +"1534 Gather_1226" [id=1534, type=Gather]; +"1535 Shape_1227" [id=1535, type=Shape]; +"1536 Constant_1228" [id=1536, type=Constant]; +"1537 Gather_1229" [id=1537, type=Gather]; +"1538 Constant_nncf_1424" [id=1538, type=Constant]; +"1539 Unsqueeze_1230" [id=1539, type=Unsqueeze]; +"1540 Concat_1231" [id=1540, type=Concat]; +"1541 Reshape_1232" [id=1541, type=Reshape]; +"1542 QuantizeLinear_h.5.attn.c_proj.weight_1" [id=1542, type=QuantizeLinear]; +"1543 DequantizeLinear_h.5.attn.c_proj.weight_1" [id=1543, type=DequantizeLinear]; +"1544 Gemm_1233" [id=1544, type=Gemm]; +"1545 Constant_nncf_1429" [id=1545, type=Constant]; +"1546 Unsqueeze_1234" [id=1546, type=Unsqueeze]; +"1547 Constant_nncf_1431" [id=1547, type=Constant]; +"1548 Unsqueeze_1235" [id=1548, type=Unsqueeze]; +"1549 Concat_1236" [id=1549, type=Concat]; +"1550 Reshape_1237" [id=1550, type=Reshape]; +"1551 Add_1238" [id=1551, type=Add]; +"1552 ReduceMean_1239" [id=1552, type=ReduceMean]; +"1553 Sub_1240" [id=1553, type=Sub]; +"1554 Constant_1241" [id=1554, type=Constant]; +"1555 Pow_1242" [id=1555, type=Pow]; +"1556 ReduceMean_1243" [id=1556, type=ReduceMean]; +"1557 Constant_1244" [id=1557, type=Constant]; +"1558 Add_1245" [id=1558, type=Add]; +"1559 Sqrt_1246" [id=1559, type=Sqrt]; +"1560 Div_1247" [id=1560, type=Div]; +"1561 Mul_1248" [id=1561, type=Mul]; +"1562 Add_1249" [id=1562, type=Add]; +"1563 QuantizeLinear_1590_1" [id=1563, type=QuantizeLinear]; +"1564 DequantizeLinear_1590_1" [id=1564, type=DequantizeLinear]; +"1565 Shape_1250" [id=1565, type=Shape]; +"1566 Constant_1251" [id=1566, type=Constant]; +"1567 Gather_1252" [id=1567, type=Gather]; +"1568 Shape_1253" [id=1568, type=Shape]; +"1569 Constant_1254" [id=1569, type=Constant]; +"1570 Gather_1255" [id=1570, type=Gather]; +"1571 Shape_1256" [id=1571, type=Shape]; +"1572 Constant_1257" [id=1572, type=Constant]; +"1573 Gather_1258" [id=1573, type=Gather]; +"1574 Constant_nncf_1456" [id=1574, type=Constant]; +"1575 Unsqueeze_1259" [id=1575, type=Unsqueeze]; +"1576 Concat_1260" [id=1576, type=Concat]; +"1577 Reshape_1261" [id=1577, type=Reshape]; +"1578 QuantizeLinear_h.5.mlp.c_fc.weight_1" [id=1578, type=QuantizeLinear]; +"1579 DequantizeLinear_h.5.mlp.c_fc.weight_1" [id=1579, type=DequantizeLinear]; +"1580 Gemm_1262" [id=1580, type=Gemm]; +"1581 Constant_nncf_1461" [id=1581, type=Constant]; +"1582 Unsqueeze_1263" [id=1582, type=Unsqueeze]; 
+"1583 Constant_nncf_1463" [id=1583, type=Constant]; +"1584 Unsqueeze_1264" [id=1584, type=Unsqueeze]; +"1585 Concat_1265" [id=1585, type=Concat]; +"1586 Reshape_1266" [id=1586, type=Reshape]; +"1587 Constant_1267" [id=1587, type=Constant]; +"1588 Mul_1268" [id=1588, type=Mul]; +"1589 Constant_1269" [id=1589, type=Constant]; +"1590 Pow_1270" [id=1590, type=Pow]; +"1591 Constant_1271" [id=1591, type=Constant]; +"1592 Mul_1272" [id=1592, type=Mul]; +"1593 Add_1273" [id=1593, type=Add]; +"1594 Constant_1274" [id=1594, type=Constant]; +"1595 Mul_1275" [id=1595, type=Mul]; +"1596 Tanh_1276" [id=1596, type=Tanh]; +"1597 Constant_1277" [id=1597, type=Constant]; +"1598 Add_1278" [id=1598, type=Add]; +"1599 Mul_1279" [id=1599, type=Mul]; +"1600 QuantizeLinear_1624_1" [id=1600, type=QuantizeLinear]; +"1601 DequantizeLinear_1624_1" [id=1601, type=DequantizeLinear]; +"1602 Shape_1280" [id=1602, type=Shape]; +"1603 Constant_1281" [id=1603, type=Constant]; +"1604 Gather_1282" [id=1604, type=Gather]; +"1605 Shape_1283" [id=1605, type=Shape]; +"1606 Constant_1284" [id=1606, type=Constant]; +"1607 Gather_1285" [id=1607, type=Gather]; +"1608 Shape_1286" [id=1608, type=Shape]; +"1609 Constant_1287" [id=1609, type=Constant]; +"1610 Gather_1288" [id=1610, type=Gather]; +"1611 Constant_nncf_1489" [id=1611, type=Constant]; +"1612 Unsqueeze_1289" [id=1612, type=Unsqueeze]; +"1613 Concat_1290" [id=1613, type=Concat]; +"1614 Reshape_1291" [id=1614, type=Reshape]; +"1615 QuantizeLinear_h.5.mlp.c_proj.weight_1" [id=1615, type=QuantizeLinear]; +"1616 DequantizeLinear_h.5.mlp.c_proj.weight_1" [id=1616, type=DequantizeLinear]; +"1617 Gemm_1292" [id=1617, type=Gemm]; +"1618 Constant_nncf_1494" [id=1618, type=Constant]; +"1619 Unsqueeze_1293" [id=1619, type=Unsqueeze]; +"1620 Constant_nncf_1496" [id=1620, type=Constant]; +"1621 Unsqueeze_1294" [id=1621, type=Unsqueeze]; +"1622 Concat_1295" [id=1622, type=Concat]; +"1623 Reshape_1296" [id=1623, type=Reshape]; +"1624 Add_1297" [id=1624, type=Add]; +"1625 ReduceMean_1298" [id=1625, type=ReduceMean]; +"1626 Sub_1299" [id=1626, type=Sub]; +"1627 Constant_1300" [id=1627, type=Constant]; +"1628 Pow_1301" [id=1628, type=Pow]; +"1629 ReduceMean_1302" [id=1629, type=ReduceMean]; +"1630 Constant_1303" [id=1630, type=Constant]; +"1631 Add_1304" [id=1631, type=Add]; +"1632 Sqrt_1305" [id=1632, type=Sqrt]; +"1633 Div_1306" [id=1633, type=Div]; +"1634 Mul_1307" [id=1634, type=Mul]; +"1635 Add_1308" [id=1635, type=Add]; +"1636 QuantizeLinear_1657_1" [id=1636, type=QuantizeLinear]; +"1637 DequantizeLinear_1657_1" [id=1637, type=DequantizeLinear]; +"1638 Shape_1309" [id=1638, type=Shape]; +"1639 Constant_1310" [id=1639, type=Constant]; +"1640 Gather_1311" [id=1640, type=Gather]; +"1641 Shape_1312" [id=1641, type=Shape]; +"1642 Constant_1313" [id=1642, type=Constant]; +"1643 Gather_1314" [id=1643, type=Gather]; +"1644 Shape_1315" [id=1644, type=Shape]; +"1645 Constant_1316" [id=1645, type=Constant]; +"1646 Gather_1317" [id=1646, type=Gather]; +"1647 Constant_nncf_1521" [id=1647, type=Constant]; +"1648 Unsqueeze_1318" [id=1648, type=Unsqueeze]; +"1649 Concat_1319" [id=1649, type=Concat]; +"1650 Reshape_1320" [id=1650, type=Reshape]; +"1651 QuantizeLinear_h.6.attn.c_attn.weight_1" [id=1651, type=QuantizeLinear]; +"1652 DequantizeLinear_h.6.attn.c_attn.weight_1" [id=1652, type=DequantizeLinear]; +"1653 Gemm_1321" [id=1653, type=Gemm]; +"1654 Constant_nncf_1526" [id=1654, type=Constant]; +"1655 Unsqueeze_1322" [id=1655, type=Unsqueeze]; +"1656 Constant_nncf_1528" [id=1656, type=Constant]; 
+"1657 Unsqueeze_1323" [id=1657, type=Unsqueeze]; +"1658 Concat_1324" [id=1658, type=Concat]; +"1659 Reshape_1325" [id=1659, type=Reshape]; +"1660 Constant_nncf_1532" [id=1660, type=Constant]; +"1661 Split_1326" [id=1661, type=Split]; +"1662 QuantizeLinear_query.13_1" [id=1662, type=QuantizeLinear]; +"1663 DequantizeLinear_query.13_1" [id=1663, type=DequantizeLinear]; +"1664 Shape_1327" [id=1664, type=Shape]; +"1665 Constant_1328" [id=1665, type=Constant]; +"1666 Gather_1329" [id=1666, type=Gather]; +"1667 Shape_1330" [id=1667, type=Shape]; +"1668 Constant_1331" [id=1668, type=Constant]; +"1669 Gather_1332" [id=1669, type=Gather]; +"1670 Shape_1333" [id=1670, type=Shape]; +"1671 Constant_1334" [id=1671, type=Constant]; +"1672 Gather_1335" [id=1672, type=Gather]; +"1673 Constant_1336" [id=1673, type=Constant]; +"1674 Div_1337" [id=1674, type=Div]; +"1675 Cast_1338" [id=1675, type=Cast]; +"1676 Cast_1339" [id=1676, type=Cast]; +"1677 Constant_nncf_1547" [id=1677, type=Constant]; +"1678 Unsqueeze_1340" [id=1678, type=Unsqueeze]; +"1679 Constant_nncf_1549" [id=1679, type=Constant]; +"1680 Unsqueeze_1341" [id=1680, type=Unsqueeze]; +"1681 Constant_nncf_1551" [id=1681, type=Constant]; +"1682 Unsqueeze_1342" [id=1682, type=Unsqueeze]; +"1683 Concat_1343" [id=1683, type=Concat]; +"1684 Reshape_1344" [id=1684, type=Reshape]; +"1685 Transpose_1345" [id=1685, type=Transpose]; +"1686 Shape_1346" [id=1686, type=Shape]; +"1687 Constant_1347" [id=1687, type=Constant]; +"1688 Gather_1348" [id=1688, type=Gather]; +"1689 Shape_1349" [id=1689, type=Shape]; +"1690 Constant_1350" [id=1690, type=Constant]; +"1691 Gather_1351" [id=1691, type=Gather]; +"1692 Shape_1352" [id=1692, type=Shape]; +"1693 Constant_1353" [id=1693, type=Constant]; +"1694 Gather_1354" [id=1694, type=Gather]; +"1695 Constant_1355" [id=1695, type=Constant]; +"1696 Div_1356" [id=1696, type=Div]; +"1697 Cast_1357" [id=1697, type=Cast]; +"1698 Cast_1358" [id=1698, type=Cast]; +"1699 Constant_nncf_1569" [id=1699, type=Constant]; +"1700 Unsqueeze_1359" [id=1700, type=Unsqueeze]; +"1701 Constant_nncf_1571" [id=1701, type=Constant]; +"1702 Unsqueeze_1360" [id=1702, type=Unsqueeze]; +"1703 Constant_nncf_1573" [id=1703, type=Constant]; +"1704 Unsqueeze_1361" [id=1704, type=Unsqueeze]; +"1705 Concat_1362" [id=1705, type=Concat]; +"1706 Reshape_1363" [id=1706, type=Reshape]; +"1707 QuantizeLinear_1722_1" [id=1707, type=QuantizeLinear]; +"1708 DequantizeLinear_1722_1" [id=1708, type=DequantizeLinear]; +"1709 Transpose_1364" [id=1709, type=Transpose]; +"1710 Shape_1365" [id=1710, type=Shape]; +"1711 Constant_1366" [id=1711, type=Constant]; +"1712 Gather_1367" [id=1712, type=Gather]; +"1713 Shape_1368" [id=1713, type=Shape]; +"1714 Constant_1369" [id=1714, type=Constant]; +"1715 Gather_1370" [id=1715, type=Gather]; +"1716 Shape_1371" [id=1716, type=Shape]; +"1717 Constant_1372" [id=1717, type=Constant]; +"1718 Gather_1373" [id=1718, type=Gather]; +"1719 Constant_1374" [id=1719, type=Constant]; +"1720 Div_1375" [id=1720, type=Div]; +"1721 Cast_1376" [id=1721, type=Cast]; +"1722 Cast_1377" [id=1722, type=Cast]; +"1723 Constant_nncf_1591" [id=1723, type=Constant]; +"1724 Unsqueeze_1378" [id=1724, type=Unsqueeze]; +"1725 Constant_nncf_1593" [id=1725, type=Constant]; +"1726 Unsqueeze_1379" [id=1726, type=Unsqueeze]; +"1727 Constant_nncf_1595" [id=1727, type=Constant]; +"1728 Unsqueeze_1380" [id=1728, type=Unsqueeze]; +"1729 Concat_1381" [id=1729, type=Concat]; +"1730 Reshape_1382" [id=1730, type=Reshape]; +"1731 Transpose_1383" [id=1731, type=Transpose]; 
+"1732 Transpose_1384" [id=1732, type=Transpose]; +"1733 Constant_nncf_1601" [id=1733, type=Constant]; +"1734 Unsqueeze_1385" [id=1734, type=Unsqueeze]; +"1735 Constant_nncf_1603" [id=1735, type=Constant]; +"1736 Unsqueeze_1386" [id=1736, type=Unsqueeze]; +"1737 Concat_1387" [id=1737, type=Concat]; +"1738 MatMul_1388" [id=1738, type=MatMul]; +"1739 Constant_1389" [id=1739, type=Constant]; +"1740 Div_1390" [id=1740, type=Div]; +"1741 Shape_1391" [id=1741, type=Shape]; +"1742 Constant_1392" [id=1742, type=Constant]; +"1743 Gather_1393" [id=1743, type=Gather]; +"1744 Shape_1394" [id=1744, type=Shape]; +"1745 Constant_1395" [id=1745, type=Constant]; +"1746 Gather_1396" [id=1746, type=Gather]; +"1747 Sub_1397" [id=1747, type=Sub]; +"1748 Constant_nncf_1616" [id=1748, type=Constant]; +"1749 Unsqueeze_1398" [id=1749, type=Unsqueeze]; +"1750 Constant_nncf_1618" [id=1750, type=Constant]; +"1751 Unsqueeze_1399" [id=1751, type=Unsqueeze]; +"1752 Constant_1400" [id=1752, type=Constant]; +"1753 Slice_1401" [id=1753, type=Slice]; +"1754 Constant_nncf_1622" [id=1754, type=Constant]; +"1755 Unsqueeze_1402" [id=1755, type=Unsqueeze]; +"1756 Constant_1403" [id=1756, type=Constant]; +"1757 Slice_1404" [id=1757, type=Slice]; +"1758 Mul_1405" [id=1758, type=Mul]; +"1759 Constant_1406" [id=1759, type=Constant]; +"1760 Sub_1407" [id=1760, type=Sub]; +"1761 Constant_1408" [id=1761, type=Constant]; +"1762 Mul_1409" [id=1762, type=Mul]; +"1763 Sub_1410" [id=1763, type=Sub]; +"1764 Shape_nncf_1632" [id=1764, type=Shape]; +"1765 Flatten_nncf_1633" [id=1765, type=Flatten]; +"1766 Softmax_1411" [id=1766, type=Softmax]; +"1767 Reshape_nncf_1635" [id=1767, type=Reshape]; +"1768 MatMul_1412" [id=1768, type=MatMul]; +"1769 QuantizeLinear_1779_1" [id=1769, type=QuantizeLinear]; +"1770 DequantizeLinear_1779_1" [id=1770, type=DequantizeLinear]; +"1771 Transpose_1413" [id=1771, type=Transpose]; +"1772 Shape_1414" [id=1772, type=Shape]; +"1773 Constant_1415" [id=1773, type=Constant]; +"1774 Gather_1416" [id=1774, type=Gather]; +"1775 Shape_1417" [id=1775, type=Shape]; +"1776 Constant_1418" [id=1776, type=Constant]; +"1777 Gather_1419" [id=1777, type=Gather]; +"1778 Shape_1420" [id=1778, type=Shape]; +"1779 Constant_1421" [id=1779, type=Constant]; +"1780 Gather_1422" [id=1780, type=Gather]; +"1781 Shape_1423" [id=1781, type=Shape]; +"1782 Constant_1424" [id=1782, type=Constant]; +"1783 Gather_1425" [id=1783, type=Gather]; +"1784 Mul_1426" [id=1784, type=Mul]; +"1785 Constant_nncf_1651" [id=1785, type=Constant]; +"1786 Unsqueeze_1427" [id=1786, type=Unsqueeze]; +"1787 Constant_nncf_1653" [id=1787, type=Constant]; +"1788 Unsqueeze_1428" [id=1788, type=Unsqueeze]; +"1789 Constant_nncf_1655" [id=1789, type=Constant]; +"1790 Unsqueeze_1429" [id=1790, type=Unsqueeze]; +"1791 Concat_1430" [id=1791, type=Concat]; +"1792 Reshape_1431" [id=1792, type=Reshape]; +"1793 Shape_1432" [id=1793, type=Shape]; +"1794 Constant_1433" [id=1794, type=Constant]; +"1795 Gather_1434" [id=1795, type=Gather]; +"1796 Shape_1435" [id=1796, type=Shape]; +"1797 Constant_1436" [id=1797, type=Constant]; +"1798 Gather_1437" [id=1798, type=Gather]; +"1799 Shape_1438" [id=1799, type=Shape]; +"1800 Constant_1439" [id=1800, type=Constant]; +"1801 Gather_1440" [id=1801, type=Gather]; +"1802 Constant_nncf_1668" [id=1802, type=Constant]; +"1803 Unsqueeze_1441" [id=1803, type=Unsqueeze]; +"1804 Concat_1442" [id=1804, type=Concat]; +"1805 Reshape_1443" [id=1805, type=Reshape]; +"1806 QuantizeLinear_h.6.attn.c_proj.weight_1" [id=1806, type=QuantizeLinear]; +"1807 
DequantizeLinear_h.6.attn.c_proj.weight_1" [id=1807, type=DequantizeLinear]; +"1808 Gemm_1444" [id=1808, type=Gemm]; +"1809 Constant_nncf_1673" [id=1809, type=Constant]; +"1810 Unsqueeze_1445" [id=1810, type=Unsqueeze]; +"1811 Constant_nncf_1675" [id=1811, type=Constant]; +"1812 Unsqueeze_1446" [id=1812, type=Unsqueeze]; +"1813 Concat_1447" [id=1813, type=Concat]; +"1814 Reshape_1448" [id=1814, type=Reshape]; +"1815 Add_1449" [id=1815, type=Add]; +"1816 ReduceMean_1450" [id=1816, type=ReduceMean]; +"1817 Sub_1451" [id=1817, type=Sub]; +"1818 Constant_1452" [id=1818, type=Constant]; +"1819 Pow_1453" [id=1819, type=Pow]; +"1820 ReduceMean_1454" [id=1820, type=ReduceMean]; +"1821 Constant_1455" [id=1821, type=Constant]; +"1822 Add_1456" [id=1822, type=Add]; +"1823 Sqrt_1457" [id=1823, type=Sqrt]; +"1824 Div_1458" [id=1824, type=Div]; +"1825 Mul_1459" [id=1825, type=Mul]; +"1826 Add_1460" [id=1826, type=Add]; +"1827 QuantizeLinear_1831_1" [id=1827, type=QuantizeLinear]; +"1828 DequantizeLinear_1831_1" [id=1828, type=DequantizeLinear]; +"1829 Shape_1461" [id=1829, type=Shape]; +"1830 Constant_1462" [id=1830, type=Constant]; +"1831 Gather_1463" [id=1831, type=Gather]; +"1832 Shape_1464" [id=1832, type=Shape]; +"1833 Constant_1465" [id=1833, type=Constant]; +"1834 Gather_1466" [id=1834, type=Gather]; +"1835 Shape_1467" [id=1835, type=Shape]; +"1836 Constant_1468" [id=1836, type=Constant]; +"1837 Gather_1469" [id=1837, type=Gather]; +"1838 Constant_nncf_1700" [id=1838, type=Constant]; +"1839 Unsqueeze_1470" [id=1839, type=Unsqueeze]; +"1840 Concat_1471" [id=1840, type=Concat]; +"1841 Reshape_1472" [id=1841, type=Reshape]; +"1842 QuantizeLinear_h.6.mlp.c_fc.weight_1" [id=1842, type=QuantizeLinear]; +"1843 DequantizeLinear_h.6.mlp.c_fc.weight_1" [id=1843, type=DequantizeLinear]; +"1844 Gemm_1473" [id=1844, type=Gemm]; +"1845 Constant_nncf_1705" [id=1845, type=Constant]; +"1846 Unsqueeze_1474" [id=1846, type=Unsqueeze]; +"1847 Constant_nncf_1707" [id=1847, type=Constant]; +"1848 Unsqueeze_1475" [id=1848, type=Unsqueeze]; +"1849 Concat_1476" [id=1849, type=Concat]; +"1850 Reshape_1477" [id=1850, type=Reshape]; +"1851 Constant_1478" [id=1851, type=Constant]; +"1852 Mul_1479" [id=1852, type=Mul]; +"1853 Constant_1480" [id=1853, type=Constant]; +"1854 Pow_1481" [id=1854, type=Pow]; +"1855 Constant_1482" [id=1855, type=Constant]; +"1856 Mul_1483" [id=1856, type=Mul]; +"1857 Add_1484" [id=1857, type=Add]; +"1858 Constant_1485" [id=1858, type=Constant]; +"1859 Mul_1486" [id=1859, type=Mul]; +"1860 Tanh_1487" [id=1860, type=Tanh]; +"1861 Constant_1488" [id=1861, type=Constant]; +"1862 Add_1489" [id=1862, type=Add]; +"1863 Mul_1490" [id=1863, type=Mul]; +"1864 QuantizeLinear_1865_1" [id=1864, type=QuantizeLinear]; +"1865 DequantizeLinear_1865_1" [id=1865, type=DequantizeLinear]; +"1866 Shape_1491" [id=1866, type=Shape]; +"1867 Constant_1492" [id=1867, type=Constant]; +"1868 Gather_1493" [id=1868, type=Gather]; +"1869 Shape_1494" [id=1869, type=Shape]; +"1870 Constant_1495" [id=1870, type=Constant]; +"1871 Gather_1496" [id=1871, type=Gather]; +"1872 Shape_1497" [id=1872, type=Shape]; +"1873 Constant_1498" [id=1873, type=Constant]; +"1874 Gather_1499" [id=1874, type=Gather]; +"1875 Constant_nncf_1733" [id=1875, type=Constant]; +"1876 Unsqueeze_1500" [id=1876, type=Unsqueeze]; +"1877 Concat_1501" [id=1877, type=Concat]; +"1878 Reshape_1502" [id=1878, type=Reshape]; +"1879 QuantizeLinear_h.6.mlp.c_proj.weight_1" [id=1879, type=QuantizeLinear]; +"1880 DequantizeLinear_h.6.mlp.c_proj.weight_1" [id=1880, 
type=DequantizeLinear]; +"1881 Gemm_1503" [id=1881, type=Gemm]; +"1882 Constant_nncf_1738" [id=1882, type=Constant]; +"1883 Unsqueeze_1504" [id=1883, type=Unsqueeze]; +"1884 Constant_nncf_1740" [id=1884, type=Constant]; +"1885 Unsqueeze_1505" [id=1885, type=Unsqueeze]; +"1886 Concat_1506" [id=1886, type=Concat]; +"1887 Reshape_1507" [id=1887, type=Reshape]; +"1888 Add_1508" [id=1888, type=Add]; +"1889 ReduceMean_1509" [id=1889, type=ReduceMean]; +"1890 Sub_1510" [id=1890, type=Sub]; +"1891 Constant_1511" [id=1891, type=Constant]; +"1892 Pow_1512" [id=1892, type=Pow]; +"1893 ReduceMean_1513" [id=1893, type=ReduceMean]; +"1894 Constant_1514" [id=1894, type=Constant]; +"1895 Add_1515" [id=1895, type=Add]; +"1896 Sqrt_1516" [id=1896, type=Sqrt]; +"1897 Div_1517" [id=1897, type=Div]; +"1898 Mul_1518" [id=1898, type=Mul]; +"1899 Add_1519" [id=1899, type=Add]; +"1900 QuantizeLinear_1898_1" [id=1900, type=QuantizeLinear]; +"1901 DequantizeLinear_1898_1" [id=1901, type=DequantizeLinear]; +"1902 Shape_1520" [id=1902, type=Shape]; +"1903 Constant_1521" [id=1903, type=Constant]; +"1904 Gather_1522" [id=1904, type=Gather]; +"1905 Shape_1523" [id=1905, type=Shape]; +"1906 Constant_1524" [id=1906, type=Constant]; +"1907 Gather_1525" [id=1907, type=Gather]; +"1908 Shape_1526" [id=1908, type=Shape]; +"1909 Constant_1527" [id=1909, type=Constant]; +"1910 Gather_1528" [id=1910, type=Gather]; +"1911 Constant_nncf_1765" [id=1911, type=Constant]; +"1912 Unsqueeze_1529" [id=1912, type=Unsqueeze]; +"1913 Concat_1530" [id=1913, type=Concat]; +"1914 Reshape_1531" [id=1914, type=Reshape]; +"1915 QuantizeLinear_h.7.attn.c_attn.weight_1" [id=1915, type=QuantizeLinear]; +"1916 DequantizeLinear_h.7.attn.c_attn.weight_1" [id=1916, type=DequantizeLinear]; +"1917 Gemm_1532" [id=1917, type=Gemm]; +"1918 Constant_nncf_1770" [id=1918, type=Constant]; +"1919 Unsqueeze_1533" [id=1919, type=Unsqueeze]; +"1920 Constant_nncf_1772" [id=1920, type=Constant]; +"1921 Unsqueeze_1534" [id=1921, type=Unsqueeze]; +"1922 Concat_1535" [id=1922, type=Concat]; +"1923 Reshape_1536" [id=1923, type=Reshape]; +"1924 Constant_nncf_1776" [id=1924, type=Constant]; +"1925 Split_1537" [id=1925, type=Split]; +"1926 QuantizeLinear_query.15_1" [id=1926, type=QuantizeLinear]; +"1927 DequantizeLinear_query.15_1" [id=1927, type=DequantizeLinear]; +"1928 Shape_1538" [id=1928, type=Shape]; +"1929 Constant_1539" [id=1929, type=Constant]; +"1930 Gather_1540" [id=1930, type=Gather]; +"1931 Shape_1541" [id=1931, type=Shape]; +"1932 Constant_1542" [id=1932, type=Constant]; +"1933 Gather_1543" [id=1933, type=Gather]; +"1934 Shape_1544" [id=1934, type=Shape]; +"1935 Constant_1545" [id=1935, type=Constant]; +"1936 Gather_1546" [id=1936, type=Gather]; +"1937 Constant_1547" [id=1937, type=Constant]; +"1938 Div_1548" [id=1938, type=Div]; +"1939 Cast_1549" [id=1939, type=Cast]; +"1940 Cast_1550" [id=1940, type=Cast]; +"1941 Constant_nncf_1791" [id=1941, type=Constant]; +"1942 Unsqueeze_1551" [id=1942, type=Unsqueeze]; +"1943 Constant_nncf_1793" [id=1943, type=Constant]; +"1944 Unsqueeze_1552" [id=1944, type=Unsqueeze]; +"1945 Constant_nncf_1795" [id=1945, type=Constant]; +"1946 Unsqueeze_1553" [id=1946, type=Unsqueeze]; +"1947 Concat_1554" [id=1947, type=Concat]; +"1948 Reshape_1555" [id=1948, type=Reshape]; +"1949 Transpose_1556" [id=1949, type=Transpose]; +"1950 Shape_1557" [id=1950, type=Shape]; +"1951 Constant_1558" [id=1951, type=Constant]; +"1952 Gather_1559" [id=1952, type=Gather]; +"1953 Shape_1560" [id=1953, type=Shape]; +"1954 Constant_1561" [id=1954, 
type=Constant]; +"1955 Gather_1562" [id=1955, type=Gather]; +"1956 Shape_1563" [id=1956, type=Shape]; +"1957 Constant_1564" [id=1957, type=Constant]; +"1958 Gather_1565" [id=1958, type=Gather]; +"1959 Constant_1566" [id=1959, type=Constant]; +"1960 Div_1567" [id=1960, type=Div]; +"1961 Cast_1568" [id=1961, type=Cast]; +"1962 Cast_1569" [id=1962, type=Cast]; +"1963 Constant_nncf_1813" [id=1963, type=Constant]; +"1964 Unsqueeze_1570" [id=1964, type=Unsqueeze]; +"1965 Constant_nncf_1815" [id=1965, type=Constant]; +"1966 Unsqueeze_1571" [id=1966, type=Unsqueeze]; +"1967 Constant_nncf_1817" [id=1967, type=Constant]; +"1968 Unsqueeze_1572" [id=1968, type=Unsqueeze]; +"1969 Concat_1573" [id=1969, type=Concat]; +"1970 Reshape_1574" [id=1970, type=Reshape]; +"1971 QuantizeLinear_1963_1" [id=1971, type=QuantizeLinear]; +"1972 DequantizeLinear_1963_1" [id=1972, type=DequantizeLinear]; +"1973 Transpose_1575" [id=1973, type=Transpose]; +"1974 Shape_1576" [id=1974, type=Shape]; +"1975 Constant_1577" [id=1975, type=Constant]; +"1976 Gather_1578" [id=1976, type=Gather]; +"1977 Shape_1579" [id=1977, type=Shape]; +"1978 Constant_1580" [id=1978, type=Constant]; +"1979 Gather_1581" [id=1979, type=Gather]; +"1980 Shape_1582" [id=1980, type=Shape]; +"1981 Constant_1583" [id=1981, type=Constant]; +"1982 Gather_1584" [id=1982, type=Gather]; +"1983 Constant_1585" [id=1983, type=Constant]; +"1984 Div_1586" [id=1984, type=Div]; +"1985 Cast_1587" [id=1985, type=Cast]; +"1986 Cast_1588" [id=1986, type=Cast]; +"1987 Constant_nncf_1835" [id=1987, type=Constant]; +"1988 Unsqueeze_1589" [id=1988, type=Unsqueeze]; +"1989 Constant_nncf_1837" [id=1989, type=Constant]; +"1990 Unsqueeze_1590" [id=1990, type=Unsqueeze]; +"1991 Constant_nncf_1839" [id=1991, type=Constant]; +"1992 Unsqueeze_1591" [id=1992, type=Unsqueeze]; +"1993 Concat_1592" [id=1993, type=Concat]; +"1994 Reshape_1593" [id=1994, type=Reshape]; +"1995 Transpose_1594" [id=1995, type=Transpose]; +"1996 Transpose_1595" [id=1996, type=Transpose]; +"1997 Constant_nncf_1845" [id=1997, type=Constant]; +"1998 Unsqueeze_1596" [id=1998, type=Unsqueeze]; +"1999 Constant_nncf_1847" [id=1999, type=Constant]; +"2000 Unsqueeze_1597" [id=2000, type=Unsqueeze]; +"2001 Concat_1598" [id=2001, type=Concat]; +"2002 MatMul_1599" [id=2002, type=MatMul]; +"2003 Constant_1600" [id=2003, type=Constant]; +"2004 Div_1601" [id=2004, type=Div]; +"2005 Shape_1602" [id=2005, type=Shape]; +"2006 Constant_1603" [id=2006, type=Constant]; +"2007 Gather_1604" [id=2007, type=Gather]; +"2008 Shape_1605" [id=2008, type=Shape]; +"2009 Constant_1606" [id=2009, type=Constant]; +"2010 Gather_1607" [id=2010, type=Gather]; +"2011 Sub_1608" [id=2011, type=Sub]; +"2012 Constant_nncf_1860" [id=2012, type=Constant]; +"2013 Unsqueeze_1609" [id=2013, type=Unsqueeze]; +"2014 Constant_nncf_1862" [id=2014, type=Constant]; +"2015 Unsqueeze_1610" [id=2015, type=Unsqueeze]; +"2016 Constant_1611" [id=2016, type=Constant]; +"2017 Slice_1612" [id=2017, type=Slice]; +"2018 Constant_nncf_1866" [id=2018, type=Constant]; +"2019 Unsqueeze_1613" [id=2019, type=Unsqueeze]; +"2020 Constant_1614" [id=2020, type=Constant]; +"2021 Slice_1615" [id=2021, type=Slice]; +"2022 Mul_1616" [id=2022, type=Mul]; +"2023 Constant_1617" [id=2023, type=Constant]; +"2024 Sub_1618" [id=2024, type=Sub]; +"2025 Constant_1619" [id=2025, type=Constant]; +"2026 Mul_1620" [id=2026, type=Mul]; +"2027 Sub_1621" [id=2027, type=Sub]; +"2028 Shape_nncf_1876" [id=2028, type=Shape]; +"2029 Flatten_nncf_1877" [id=2029, type=Flatten]; +"2030 Softmax_1622" 
[id=2030, type=Softmax]; +"2031 Reshape_nncf_1879" [id=2031, type=Reshape]; +"2032 MatMul_1623" [id=2032, type=MatMul]; +"2033 QuantizeLinear_2020_1" [id=2033, type=QuantizeLinear]; +"2034 DequantizeLinear_2020_1" [id=2034, type=DequantizeLinear]; +"2035 Transpose_1624" [id=2035, type=Transpose]; +"2036 Shape_1625" [id=2036, type=Shape]; +"2037 Constant_1626" [id=2037, type=Constant]; +"2038 Gather_1627" [id=2038, type=Gather]; +"2039 Shape_1628" [id=2039, type=Shape]; +"2040 Constant_1629" [id=2040, type=Constant]; +"2041 Gather_1630" [id=2041, type=Gather]; +"2042 Shape_1631" [id=2042, type=Shape]; +"2043 Constant_1632" [id=2043, type=Constant]; +"2044 Gather_1633" [id=2044, type=Gather]; +"2045 Shape_1634" [id=2045, type=Shape]; +"2046 Constant_1635" [id=2046, type=Constant]; +"2047 Gather_1636" [id=2047, type=Gather]; +"2048 Mul_1637" [id=2048, type=Mul]; +"2049 Constant_nncf_1895" [id=2049, type=Constant]; +"2050 Unsqueeze_1638" [id=2050, type=Unsqueeze]; +"2051 Constant_nncf_1897" [id=2051, type=Constant]; +"2052 Unsqueeze_1639" [id=2052, type=Unsqueeze]; +"2053 Constant_nncf_1899" [id=2053, type=Constant]; +"2054 Unsqueeze_1640" [id=2054, type=Unsqueeze]; +"2055 Concat_1641" [id=2055, type=Concat]; +"2056 Reshape_1642" [id=2056, type=Reshape]; +"2057 Shape_1643" [id=2057, type=Shape]; +"2058 Constant_1644" [id=2058, type=Constant]; +"2059 Gather_1645" [id=2059, type=Gather]; +"2060 Shape_1646" [id=2060, type=Shape]; +"2061 Constant_1647" [id=2061, type=Constant]; +"2062 Gather_1648" [id=2062, type=Gather]; +"2063 Shape_1649" [id=2063, type=Shape]; +"2064 Constant_1650" [id=2064, type=Constant]; +"2065 Gather_1651" [id=2065, type=Gather]; +"2066 Constant_nncf_1912" [id=2066, type=Constant]; +"2067 Unsqueeze_1652" [id=2067, type=Unsqueeze]; +"2068 Concat_1653" [id=2068, type=Concat]; +"2069 Reshape_1654" [id=2069, type=Reshape]; +"2070 QuantizeLinear_h.7.attn.c_proj.weight_1" [id=2070, type=QuantizeLinear]; +"2071 DequantizeLinear_h.7.attn.c_proj.weight_1" [id=2071, type=DequantizeLinear]; +"2072 Gemm_1655" [id=2072, type=Gemm]; +"2073 Constant_nncf_1917" [id=2073, type=Constant]; +"2074 Unsqueeze_1656" [id=2074, type=Unsqueeze]; +"2075 Constant_nncf_1919" [id=2075, type=Constant]; +"2076 Unsqueeze_1657" [id=2076, type=Unsqueeze]; +"2077 Concat_1658" [id=2077, type=Concat]; +"2078 Reshape_1659" [id=2078, type=Reshape]; +"2079 Add_1660" [id=2079, type=Add]; +"2080 ReduceMean_1661" [id=2080, type=ReduceMean]; +"2081 Sub_1662" [id=2081, type=Sub]; +"2082 Constant_1663" [id=2082, type=Constant]; +"2083 Pow_1664" [id=2083, type=Pow]; +"2084 ReduceMean_1665" [id=2084, type=ReduceMean]; +"2085 Constant_1666" [id=2085, type=Constant]; +"2086 Add_1667" [id=2086, type=Add]; +"2087 Sqrt_1668" [id=2087, type=Sqrt]; +"2088 Div_1669" [id=2088, type=Div]; +"2089 Mul_1670" [id=2089, type=Mul]; +"2090 Add_1671" [id=2090, type=Add]; +"2091 QuantizeLinear_2072_1" [id=2091, type=QuantizeLinear]; +"2092 DequantizeLinear_2072_1" [id=2092, type=DequantizeLinear]; +"2093 Shape_1672" [id=2093, type=Shape]; +"2094 Constant_1673" [id=2094, type=Constant]; +"2095 Gather_1674" [id=2095, type=Gather]; +"2096 Shape_1675" [id=2096, type=Shape]; +"2097 Constant_1676" [id=2097, type=Constant]; +"2098 Gather_1677" [id=2098, type=Gather]; +"2099 Shape_1678" [id=2099, type=Shape]; +"2100 Constant_1679" [id=2100, type=Constant]; +"2101 Gather_1680" [id=2101, type=Gather]; +"2102 Constant_nncf_1944" [id=2102, type=Constant]; +"2103 Unsqueeze_1681" [id=2103, type=Unsqueeze]; +"2104 Concat_1682" [id=2104, type=Concat]; 
+"2105 Reshape_1683" [id=2105, type=Reshape]; +"2106 QuantizeLinear_h.7.mlp.c_fc.weight_1" [id=2106, type=QuantizeLinear]; +"2107 DequantizeLinear_h.7.mlp.c_fc.weight_1" [id=2107, type=DequantizeLinear]; +"2108 Gemm_1684" [id=2108, type=Gemm]; +"2109 Constant_nncf_1949" [id=2109, type=Constant]; +"2110 Unsqueeze_1685" [id=2110, type=Unsqueeze]; +"2111 Constant_nncf_1951" [id=2111, type=Constant]; +"2112 Unsqueeze_1686" [id=2112, type=Unsqueeze]; +"2113 Concat_1687" [id=2113, type=Concat]; +"2114 Reshape_1688" [id=2114, type=Reshape]; +"2115 Constant_1689" [id=2115, type=Constant]; +"2116 Mul_1690" [id=2116, type=Mul]; +"2117 Constant_1691" [id=2117, type=Constant]; +"2118 Pow_1692" [id=2118, type=Pow]; +"2119 Constant_1693" [id=2119, type=Constant]; +"2120 Mul_1694" [id=2120, type=Mul]; +"2121 Add_1695" [id=2121, type=Add]; +"2122 Constant_1696" [id=2122, type=Constant]; +"2123 Mul_1697" [id=2123, type=Mul]; +"2124 Tanh_1698" [id=2124, type=Tanh]; +"2125 Constant_1699" [id=2125, type=Constant]; +"2126 Add_1700" [id=2126, type=Add]; +"2127 Mul_1701" [id=2127, type=Mul]; +"2128 QuantizeLinear_2106_1" [id=2128, type=QuantizeLinear]; +"2129 DequantizeLinear_2106_1" [id=2129, type=DequantizeLinear]; +"2130 Shape_1702" [id=2130, type=Shape]; +"2131 Constant_1703" [id=2131, type=Constant]; +"2132 Gather_1704" [id=2132, type=Gather]; +"2133 Shape_1705" [id=2133, type=Shape]; +"2134 Constant_1706" [id=2134, type=Constant]; +"2135 Gather_1707" [id=2135, type=Gather]; +"2136 Shape_1708" [id=2136, type=Shape]; +"2137 Constant_1709" [id=2137, type=Constant]; +"2138 Gather_1710" [id=2138, type=Gather]; +"2139 Constant_nncf_1977" [id=2139, type=Constant]; +"2140 Unsqueeze_1711" [id=2140, type=Unsqueeze]; +"2141 Concat_1712" [id=2141, type=Concat]; +"2142 Reshape_1713" [id=2142, type=Reshape]; +"2143 QuantizeLinear_h.7.mlp.c_proj.weight_1" [id=2143, type=QuantizeLinear]; +"2144 DequantizeLinear_h.7.mlp.c_proj.weight_1" [id=2144, type=DequantizeLinear]; +"2145 Gemm_1714" [id=2145, type=Gemm]; +"2146 Constant_nncf_1982" [id=2146, type=Constant]; +"2147 Unsqueeze_1715" [id=2147, type=Unsqueeze]; +"2148 Constant_nncf_1984" [id=2148, type=Constant]; +"2149 Unsqueeze_1716" [id=2149, type=Unsqueeze]; +"2150 Concat_1717" [id=2150, type=Concat]; +"2151 Reshape_1718" [id=2151, type=Reshape]; +"2152 Add_1719" [id=2152, type=Add]; +"2153 ReduceMean_1720" [id=2153, type=ReduceMean]; +"2154 Sub_1721" [id=2154, type=Sub]; +"2155 Constant_1722" [id=2155, type=Constant]; +"2156 Pow_1723" [id=2156, type=Pow]; +"2157 ReduceMean_1724" [id=2157, type=ReduceMean]; +"2158 Constant_1725" [id=2158, type=Constant]; +"2159 Add_1726" [id=2159, type=Add]; +"2160 Sqrt_1727" [id=2160, type=Sqrt]; +"2161 Div_1728" [id=2161, type=Div]; +"2162 Mul_1729" [id=2162, type=Mul]; +"2163 Add_1730" [id=2163, type=Add]; +"2164 QuantizeLinear_2139_1" [id=2164, type=QuantizeLinear]; +"2165 DequantizeLinear_2139_1" [id=2165, type=DequantizeLinear]; +"2166 Shape_1731" [id=2166, type=Shape]; +"2167 Constant_1732" [id=2167, type=Constant]; +"2168 Gather_1733" [id=2168, type=Gather]; +"2169 Shape_1734" [id=2169, type=Shape]; +"2170 Constant_1735" [id=2170, type=Constant]; +"2171 Gather_1736" [id=2171, type=Gather]; +"2172 Shape_1737" [id=2172, type=Shape]; +"2173 Constant_1738" [id=2173, type=Constant]; +"2174 Gather_1739" [id=2174, type=Gather]; +"2175 Constant_nncf_2009" [id=2175, type=Constant]; +"2176 Unsqueeze_1740" [id=2176, type=Unsqueeze]; +"2177 Concat_1741" [id=2177, type=Concat]; +"2178 Reshape_1742" [id=2178, type=Reshape]; +"2179 
QuantizeLinear_h.8.attn.c_attn.weight_1" [id=2179, type=QuantizeLinear]; +"2180 DequantizeLinear_h.8.attn.c_attn.weight_1" [id=2180, type=DequantizeLinear]; +"2181 Gemm_1743" [id=2181, type=Gemm]; +"2182 Constant_nncf_2014" [id=2182, type=Constant]; +"2183 Unsqueeze_1744" [id=2183, type=Unsqueeze]; +"2184 Constant_nncf_2016" [id=2184, type=Constant]; +"2185 Unsqueeze_1745" [id=2185, type=Unsqueeze]; +"2186 Concat_1746" [id=2186, type=Concat]; +"2187 Reshape_1747" [id=2187, type=Reshape]; +"2188 Constant_nncf_2020" [id=2188, type=Constant]; +"2189 Split_1748" [id=2189, type=Split]; +"2190 QuantizeLinear_query.17_1" [id=2190, type=QuantizeLinear]; +"2191 DequantizeLinear_query.17_1" [id=2191, type=DequantizeLinear]; +"2192 Shape_1749" [id=2192, type=Shape]; +"2193 Constant_1750" [id=2193, type=Constant]; +"2194 Gather_1751" [id=2194, type=Gather]; +"2195 Shape_1752" [id=2195, type=Shape]; +"2196 Constant_1753" [id=2196, type=Constant]; +"2197 Gather_1754" [id=2197, type=Gather]; +"2198 Shape_1755" [id=2198, type=Shape]; +"2199 Constant_1756" [id=2199, type=Constant]; +"2200 Gather_1757" [id=2200, type=Gather]; +"2201 Constant_1758" [id=2201, type=Constant]; +"2202 Div_1759" [id=2202, type=Div]; +"2203 Cast_1760" [id=2203, type=Cast]; +"2204 Cast_1761" [id=2204, type=Cast]; +"2205 Constant_nncf_2035" [id=2205, type=Constant]; +"2206 Unsqueeze_1762" [id=2206, type=Unsqueeze]; +"2207 Constant_nncf_2037" [id=2207, type=Constant]; +"2208 Unsqueeze_1763" [id=2208, type=Unsqueeze]; +"2209 Constant_nncf_2039" [id=2209, type=Constant]; +"2210 Unsqueeze_1764" [id=2210, type=Unsqueeze]; +"2211 Concat_1765" [id=2211, type=Concat]; +"2212 Reshape_1766" [id=2212, type=Reshape]; +"2213 Transpose_1767" [id=2213, type=Transpose]; +"2214 Shape_1768" [id=2214, type=Shape]; +"2215 Constant_1769" [id=2215, type=Constant]; +"2216 Gather_1770" [id=2216, type=Gather]; +"2217 Shape_1771" [id=2217, type=Shape]; +"2218 Constant_1772" [id=2218, type=Constant]; +"2219 Gather_1773" [id=2219, type=Gather]; +"2220 Shape_1774" [id=2220, type=Shape]; +"2221 Constant_1775" [id=2221, type=Constant]; +"2222 Gather_1776" [id=2222, type=Gather]; +"2223 Constant_1777" [id=2223, type=Constant]; +"2224 Div_1778" [id=2224, type=Div]; +"2225 Cast_1779" [id=2225, type=Cast]; +"2226 Cast_1780" [id=2226, type=Cast]; +"2227 Constant_nncf_2057" [id=2227, type=Constant]; +"2228 Unsqueeze_1781" [id=2228, type=Unsqueeze]; +"2229 Constant_nncf_2059" [id=2229, type=Constant]; +"2230 Unsqueeze_1782" [id=2230, type=Unsqueeze]; +"2231 Constant_nncf_2061" [id=2231, type=Constant]; +"2232 Unsqueeze_1783" [id=2232, type=Unsqueeze]; +"2233 Concat_1784" [id=2233, type=Concat]; +"2234 Reshape_1785" [id=2234, type=Reshape]; +"2235 QuantizeLinear_2204_1" [id=2235, type=QuantizeLinear]; +"2236 DequantizeLinear_2204_1" [id=2236, type=DequantizeLinear]; +"2237 Transpose_1786" [id=2237, type=Transpose]; +"2238 Shape_1787" [id=2238, type=Shape]; +"2239 Constant_1788" [id=2239, type=Constant]; +"2240 Gather_1789" [id=2240, type=Gather]; +"2241 Shape_1790" [id=2241, type=Shape]; +"2242 Constant_1791" [id=2242, type=Constant]; +"2243 Gather_1792" [id=2243, type=Gather]; +"2244 Shape_1793" [id=2244, type=Shape]; +"2245 Constant_1794" [id=2245, type=Constant]; +"2246 Gather_1795" [id=2246, type=Gather]; +"2247 Constant_1796" [id=2247, type=Constant]; +"2248 Div_1797" [id=2248, type=Div]; +"2249 Cast_1798" [id=2249, type=Cast]; +"2250 Cast_1799" [id=2250, type=Cast]; +"2251 Constant_nncf_2079" [id=2251, type=Constant]; +"2252 Unsqueeze_1800" [id=2252, 
type=Unsqueeze]; +"2253 Constant_nncf_2081" [id=2253, type=Constant]; +"2254 Unsqueeze_1801" [id=2254, type=Unsqueeze]; +"2255 Constant_nncf_2083" [id=2255, type=Constant]; +"2256 Unsqueeze_1802" [id=2256, type=Unsqueeze]; +"2257 Concat_1803" [id=2257, type=Concat]; +"2258 Reshape_1804" [id=2258, type=Reshape]; +"2259 Transpose_1805" [id=2259, type=Transpose]; +"2260 Transpose_1806" [id=2260, type=Transpose]; +"2261 Constant_nncf_2089" [id=2261, type=Constant]; +"2262 Unsqueeze_1807" [id=2262, type=Unsqueeze]; +"2263 Constant_nncf_2091" [id=2263, type=Constant]; +"2264 Unsqueeze_1808" [id=2264, type=Unsqueeze]; +"2265 Concat_1809" [id=2265, type=Concat]; +"2266 MatMul_1810" [id=2266, type=MatMul]; +"2267 Constant_1811" [id=2267, type=Constant]; +"2268 Div_1812" [id=2268, type=Div]; +"2269 Shape_1813" [id=2269, type=Shape]; +"2270 Constant_1814" [id=2270, type=Constant]; +"2271 Gather_1815" [id=2271, type=Gather]; +"2272 Shape_1816" [id=2272, type=Shape]; +"2273 Constant_1817" [id=2273, type=Constant]; +"2274 Gather_1818" [id=2274, type=Gather]; +"2275 Sub_1819" [id=2275, type=Sub]; +"2276 Constant_nncf_2104" [id=2276, type=Constant]; +"2277 Unsqueeze_1820" [id=2277, type=Unsqueeze]; +"2278 Constant_nncf_2106" [id=2278, type=Constant]; +"2279 Unsqueeze_1821" [id=2279, type=Unsqueeze]; +"2280 Constant_1822" [id=2280, type=Constant]; +"2281 Slice_1823" [id=2281, type=Slice]; +"2282 Constant_nncf_2110" [id=2282, type=Constant]; +"2283 Unsqueeze_1824" [id=2283, type=Unsqueeze]; +"2284 Constant_1825" [id=2284, type=Constant]; +"2285 Slice_1826" [id=2285, type=Slice]; +"2286 Mul_1827" [id=2286, type=Mul]; +"2287 Constant_1828" [id=2287, type=Constant]; +"2288 Sub_1829" [id=2288, type=Sub]; +"2289 Constant_1830" [id=2289, type=Constant]; +"2290 Mul_1831" [id=2290, type=Mul]; +"2291 Sub_1832" [id=2291, type=Sub]; +"2292 Shape_nncf_2120" [id=2292, type=Shape]; +"2293 Flatten_nncf_2121" [id=2293, type=Flatten]; +"2294 Softmax_1833" [id=2294, type=Softmax]; +"2295 Reshape_nncf_2123" [id=2295, type=Reshape]; +"2296 MatMul_1834" [id=2296, type=MatMul]; +"2297 QuantizeLinear_2261_1" [id=2297, type=QuantizeLinear]; +"2298 DequantizeLinear_2261_1" [id=2298, type=DequantizeLinear]; +"2299 Transpose_1835" [id=2299, type=Transpose]; +"2300 Shape_1836" [id=2300, type=Shape]; +"2301 Constant_1837" [id=2301, type=Constant]; +"2302 Gather_1838" [id=2302, type=Gather]; +"2303 Shape_1839" [id=2303, type=Shape]; +"2304 Constant_1840" [id=2304, type=Constant]; +"2305 Gather_1841" [id=2305, type=Gather]; +"2306 Shape_1842" [id=2306, type=Shape]; +"2307 Constant_1843" [id=2307, type=Constant]; +"2308 Gather_1844" [id=2308, type=Gather]; +"2309 Shape_1845" [id=2309, type=Shape]; +"2310 Constant_1846" [id=2310, type=Constant]; +"2311 Gather_1847" [id=2311, type=Gather]; +"2312 Mul_1848" [id=2312, type=Mul]; +"2313 Constant_nncf_2139" [id=2313, type=Constant]; +"2314 Unsqueeze_1849" [id=2314, type=Unsqueeze]; +"2315 Constant_nncf_2141" [id=2315, type=Constant]; +"2316 Unsqueeze_1850" [id=2316, type=Unsqueeze]; +"2317 Constant_nncf_2143" [id=2317, type=Constant]; +"2318 Unsqueeze_1851" [id=2318, type=Unsqueeze]; +"2319 Concat_1852" [id=2319, type=Concat]; +"2320 Reshape_1853" [id=2320, type=Reshape]; +"2321 Shape_1854" [id=2321, type=Shape]; +"2322 Constant_1855" [id=2322, type=Constant]; +"2323 Gather_1856" [id=2323, type=Gather]; +"2324 Shape_1857" [id=2324, type=Shape]; +"2325 Constant_1858" [id=2325, type=Constant]; +"2326 Gather_1859" [id=2326, type=Gather]; +"2327 Shape_1860" [id=2327, type=Shape]; +"2328 
Constant_1861" [id=2328, type=Constant]; +"2329 Gather_1862" [id=2329, type=Gather]; +"2330 Constant_nncf_2156" [id=2330, type=Constant]; +"2331 Unsqueeze_1863" [id=2331, type=Unsqueeze]; +"2332 Concat_1864" [id=2332, type=Concat]; +"2333 Reshape_1865" [id=2333, type=Reshape]; +"2334 QuantizeLinear_h.8.attn.c_proj.weight_1" [id=2334, type=QuantizeLinear]; +"2335 DequantizeLinear_h.8.attn.c_proj.weight_1" [id=2335, type=DequantizeLinear]; +"2336 Gemm_1866" [id=2336, type=Gemm]; +"2337 Constant_nncf_2161" [id=2337, type=Constant]; +"2338 Unsqueeze_1867" [id=2338, type=Unsqueeze]; +"2339 Constant_nncf_2163" [id=2339, type=Constant]; +"2340 Unsqueeze_1868" [id=2340, type=Unsqueeze]; +"2341 Concat_1869" [id=2341, type=Concat]; +"2342 Reshape_1870" [id=2342, type=Reshape]; +"2343 Add_1871" [id=2343, type=Add]; +"2344 ReduceMean_1872" [id=2344, type=ReduceMean]; +"2345 Sub_1873" [id=2345, type=Sub]; +"2346 Constant_1874" [id=2346, type=Constant]; +"2347 Pow_1875" [id=2347, type=Pow]; +"2348 ReduceMean_1876" [id=2348, type=ReduceMean]; +"2349 Constant_1877" [id=2349, type=Constant]; +"2350 Add_1878" [id=2350, type=Add]; +"2351 Sqrt_1879" [id=2351, type=Sqrt]; +"2352 Div_1880" [id=2352, type=Div]; +"2353 Mul_1881" [id=2353, type=Mul]; +"2354 Add_1882" [id=2354, type=Add]; +"2355 QuantizeLinear_2313_1" [id=2355, type=QuantizeLinear]; +"2356 DequantizeLinear_2313_1" [id=2356, type=DequantizeLinear]; +"2357 Shape_1883" [id=2357, type=Shape]; +"2358 Constant_1884" [id=2358, type=Constant]; +"2359 Gather_1885" [id=2359, type=Gather]; +"2360 Shape_1886" [id=2360, type=Shape]; +"2361 Constant_1887" [id=2361, type=Constant]; +"2362 Gather_1888" [id=2362, type=Gather]; +"2363 Shape_1889" [id=2363, type=Shape]; +"2364 Constant_1890" [id=2364, type=Constant]; +"2365 Gather_1891" [id=2365, type=Gather]; +"2366 Constant_nncf_2188" [id=2366, type=Constant]; +"2367 Unsqueeze_1892" [id=2367, type=Unsqueeze]; +"2368 Concat_1893" [id=2368, type=Concat]; +"2369 Reshape_1894" [id=2369, type=Reshape]; +"2370 QuantizeLinear_h.8.mlp.c_fc.weight_1" [id=2370, type=QuantizeLinear]; +"2371 DequantizeLinear_h.8.mlp.c_fc.weight_1" [id=2371, type=DequantizeLinear]; +"2372 Gemm_1895" [id=2372, type=Gemm]; +"2373 Constant_nncf_2193" [id=2373, type=Constant]; +"2374 Unsqueeze_1896" [id=2374, type=Unsqueeze]; +"2375 Constant_nncf_2195" [id=2375, type=Constant]; +"2376 Unsqueeze_1897" [id=2376, type=Unsqueeze]; +"2377 Concat_1898" [id=2377, type=Concat]; +"2378 Reshape_1899" [id=2378, type=Reshape]; +"2379 Constant_1900" [id=2379, type=Constant]; +"2380 Mul_1901" [id=2380, type=Mul]; +"2381 Constant_1902" [id=2381, type=Constant]; +"2382 Pow_1903" [id=2382, type=Pow]; +"2383 Constant_1904" [id=2383, type=Constant]; +"2384 Mul_1905" [id=2384, type=Mul]; +"2385 Add_1906" [id=2385, type=Add]; +"2386 Constant_1907" [id=2386, type=Constant]; +"2387 Mul_1908" [id=2387, type=Mul]; +"2388 Tanh_1909" [id=2388, type=Tanh]; +"2389 Constant_1910" [id=2389, type=Constant]; +"2390 Add_1911" [id=2390, type=Add]; +"2391 Mul_1912" [id=2391, type=Mul]; +"2392 QuantizeLinear_2347_1" [id=2392, type=QuantizeLinear]; +"2393 DequantizeLinear_2347_1" [id=2393, type=DequantizeLinear]; +"2394 Shape_1913" [id=2394, type=Shape]; +"2395 Constant_1914" [id=2395, type=Constant]; +"2396 Gather_1915" [id=2396, type=Gather]; +"2397 Shape_1916" [id=2397, type=Shape]; +"2398 Constant_1917" [id=2398, type=Constant]; +"2399 Gather_1918" [id=2399, type=Gather]; +"2400 Shape_1919" [id=2400, type=Shape]; +"2401 Constant_1920" [id=2401, type=Constant]; +"2402 Gather_1921" 
[id=2402, type=Gather]; +"2403 Constant_nncf_2221" [id=2403, type=Constant]; +"2404 Unsqueeze_1922" [id=2404, type=Unsqueeze]; +"2405 Concat_1923" [id=2405, type=Concat]; +"2406 Reshape_1924" [id=2406, type=Reshape]; +"2407 QuantizeLinear_h.8.mlp.c_proj.weight_1" [id=2407, type=QuantizeLinear]; +"2408 DequantizeLinear_h.8.mlp.c_proj.weight_1" [id=2408, type=DequantizeLinear]; +"2409 Gemm_1925" [id=2409, type=Gemm]; +"2410 Constant_nncf_2226" [id=2410, type=Constant]; +"2411 Unsqueeze_1926" [id=2411, type=Unsqueeze]; +"2412 Constant_nncf_2228" [id=2412, type=Constant]; +"2413 Unsqueeze_1927" [id=2413, type=Unsqueeze]; +"2414 Concat_1928" [id=2414, type=Concat]; +"2415 Reshape_1929" [id=2415, type=Reshape]; +"2416 Add_1930" [id=2416, type=Add]; +"2417 ReduceMean_1931" [id=2417, type=ReduceMean]; +"2418 Sub_1932" [id=2418, type=Sub]; +"2419 Constant_1933" [id=2419, type=Constant]; +"2420 Pow_1934" [id=2420, type=Pow]; +"2421 ReduceMean_1935" [id=2421, type=ReduceMean]; +"2422 Constant_1936" [id=2422, type=Constant]; +"2423 Add_1937" [id=2423, type=Add]; +"2424 Sqrt_1938" [id=2424, type=Sqrt]; +"2425 Div_1939" [id=2425, type=Div]; +"2426 Mul_1940" [id=2426, type=Mul]; +"2427 Add_1941" [id=2427, type=Add]; +"2428 QuantizeLinear_2380_1" [id=2428, type=QuantizeLinear]; +"2429 DequantizeLinear_2380_1" [id=2429, type=DequantizeLinear]; +"2430 Shape_1942" [id=2430, type=Shape]; +"2431 Constant_1943" [id=2431, type=Constant]; +"2432 Gather_1944" [id=2432, type=Gather]; +"2433 Shape_1945" [id=2433, type=Shape]; +"2434 Constant_1946" [id=2434, type=Constant]; +"2435 Gather_1947" [id=2435, type=Gather]; +"2436 Shape_1948" [id=2436, type=Shape]; +"2437 Constant_1949" [id=2437, type=Constant]; +"2438 Gather_1950" [id=2438, type=Gather]; +"2439 Constant_nncf_2253" [id=2439, type=Constant]; +"2440 Unsqueeze_1951" [id=2440, type=Unsqueeze]; +"2441 Concat_1952" [id=2441, type=Concat]; +"2442 Reshape_1953" [id=2442, type=Reshape]; +"2443 QuantizeLinear_h.9.attn.c_attn.weight_1" [id=2443, type=QuantizeLinear]; +"2444 DequantizeLinear_h.9.attn.c_attn.weight_1" [id=2444, type=DequantizeLinear]; +"2445 Gemm_1954" [id=2445, type=Gemm]; +"2446 Constant_nncf_2258" [id=2446, type=Constant]; +"2447 Unsqueeze_1955" [id=2447, type=Unsqueeze]; +"2448 Constant_nncf_2260" [id=2448, type=Constant]; +"2449 Unsqueeze_1956" [id=2449, type=Unsqueeze]; +"2450 Concat_1957" [id=2450, type=Concat]; +"2451 Reshape_1958" [id=2451, type=Reshape]; +"2452 Constant_nncf_2264" [id=2452, type=Constant]; +"2453 Split_1959" [id=2453, type=Split]; +"2454 QuantizeLinear_query.19_1" [id=2454, type=QuantizeLinear]; +"2455 DequantizeLinear_query.19_1" [id=2455, type=DequantizeLinear]; +"2456 Shape_1960" [id=2456, type=Shape]; +"2457 Constant_1961" [id=2457, type=Constant]; +"2458 Gather_1962" [id=2458, type=Gather]; +"2459 Shape_1963" [id=2459, type=Shape]; +"2460 Constant_1964" [id=2460, type=Constant]; +"2461 Gather_1965" [id=2461, type=Gather]; +"2462 Shape_1966" [id=2462, type=Shape]; +"2463 Constant_1967" [id=2463, type=Constant]; +"2464 Gather_1968" [id=2464, type=Gather]; +"2465 Constant_1969" [id=2465, type=Constant]; +"2466 Div_1970" [id=2466, type=Div]; +"2467 Cast_1971" [id=2467, type=Cast]; +"2468 Cast_1972" [id=2468, type=Cast]; +"2469 Constant_nncf_2279" [id=2469, type=Constant]; +"2470 Unsqueeze_1973" [id=2470, type=Unsqueeze]; +"2471 Constant_nncf_2281" [id=2471, type=Constant]; +"2472 Unsqueeze_1974" [id=2472, type=Unsqueeze]; +"2473 Constant_nncf_2283" [id=2473, type=Constant]; +"2474 Unsqueeze_1975" [id=2474, type=Unsqueeze]; 
+"2475 Concat_1976" [id=2475, type=Concat]; +"2476 Reshape_1977" [id=2476, type=Reshape]; +"2477 Transpose_1978" [id=2477, type=Transpose]; +"2478 Shape_1979" [id=2478, type=Shape]; +"2479 Constant_1980" [id=2479, type=Constant]; +"2480 Gather_1981" [id=2480, type=Gather]; +"2481 Shape_1982" [id=2481, type=Shape]; +"2482 Constant_1983" [id=2482, type=Constant]; +"2483 Gather_1984" [id=2483, type=Gather]; +"2484 Shape_1985" [id=2484, type=Shape]; +"2485 Constant_1986" [id=2485, type=Constant]; +"2486 Gather_1987" [id=2486, type=Gather]; +"2487 Constant_1988" [id=2487, type=Constant]; +"2488 Div_1989" [id=2488, type=Div]; +"2489 Cast_1990" [id=2489, type=Cast]; +"2490 Cast_1991" [id=2490, type=Cast]; +"2491 Constant_nncf_2301" [id=2491, type=Constant]; +"2492 Unsqueeze_1992" [id=2492, type=Unsqueeze]; +"2493 Constant_nncf_2303" [id=2493, type=Constant]; +"2494 Unsqueeze_1993" [id=2494, type=Unsqueeze]; +"2495 Constant_nncf_2305" [id=2495, type=Constant]; +"2496 Unsqueeze_1994" [id=2496, type=Unsqueeze]; +"2497 Concat_1995" [id=2497, type=Concat]; +"2498 Reshape_1996" [id=2498, type=Reshape]; +"2499 QuantizeLinear_2445_1" [id=2499, type=QuantizeLinear]; +"2500 DequantizeLinear_2445_1" [id=2500, type=DequantizeLinear]; +"2501 Transpose_1997" [id=2501, type=Transpose]; +"2502 Shape_1998" [id=2502, type=Shape]; +"2503 Constant_1999" [id=2503, type=Constant]; +"2504 Gather_2000" [id=2504, type=Gather]; +"2505 Shape_2001" [id=2505, type=Shape]; +"2506 Constant_2002" [id=2506, type=Constant]; +"2507 Gather_2003" [id=2507, type=Gather]; +"2508 Shape_2004" [id=2508, type=Shape]; +"2509 Constant_2005" [id=2509, type=Constant]; +"2510 Gather_2006" [id=2510, type=Gather]; +"2511 Constant_2007" [id=2511, type=Constant]; +"2512 Div_2008" [id=2512, type=Div]; +"2513 Cast_2009" [id=2513, type=Cast]; +"2514 Cast_2010" [id=2514, type=Cast]; +"2515 Constant_nncf_2323" [id=2515, type=Constant]; +"2516 Unsqueeze_2011" [id=2516, type=Unsqueeze]; +"2517 Constant_nncf_2325" [id=2517, type=Constant]; +"2518 Unsqueeze_2012" [id=2518, type=Unsqueeze]; +"2519 Constant_nncf_2327" [id=2519, type=Constant]; +"2520 Unsqueeze_2013" [id=2520, type=Unsqueeze]; +"2521 Concat_2014" [id=2521, type=Concat]; +"2522 Reshape_2015" [id=2522, type=Reshape]; +"2523 Transpose_2016" [id=2523, type=Transpose]; +"2524 Transpose_2017" [id=2524, type=Transpose]; +"2525 Constant_nncf_2333" [id=2525, type=Constant]; +"2526 Unsqueeze_2018" [id=2526, type=Unsqueeze]; +"2527 Constant_nncf_2335" [id=2527, type=Constant]; +"2528 Unsqueeze_2019" [id=2528, type=Unsqueeze]; +"2529 Concat_2020" [id=2529, type=Concat]; +"2530 MatMul_2021" [id=2530, type=MatMul]; +"2531 Constant_2022" [id=2531, type=Constant]; +"2532 Div_2023" [id=2532, type=Div]; +"2533 Shape_2024" [id=2533, type=Shape]; +"2534 Constant_2025" [id=2534, type=Constant]; +"2535 Gather_2026" [id=2535, type=Gather]; +"2536 Shape_2027" [id=2536, type=Shape]; +"2537 Constant_2028" [id=2537, type=Constant]; +"2538 Gather_2029" [id=2538, type=Gather]; +"2539 Sub_2030" [id=2539, type=Sub]; +"2540 Constant_nncf_2348" [id=2540, type=Constant]; +"2541 Unsqueeze_2031" [id=2541, type=Unsqueeze]; +"2542 Constant_nncf_2350" [id=2542, type=Constant]; +"2543 Unsqueeze_2032" [id=2543, type=Unsqueeze]; +"2544 Constant_2033" [id=2544, type=Constant]; +"2545 Slice_2034" [id=2545, type=Slice]; +"2546 Constant_nncf_2354" [id=2546, type=Constant]; +"2547 Unsqueeze_2035" [id=2547, type=Unsqueeze]; +"2548 Constant_2036" [id=2548, type=Constant]; +"2549 Slice_2037" [id=2549, type=Slice]; +"2550 Mul_2038" [id=2550, 
type=Mul]; +"2551 Constant_2039" [id=2551, type=Constant]; +"2552 Sub_2040" [id=2552, type=Sub]; +"2553 Constant_2041" [id=2553, type=Constant]; +"2554 Mul_2042" [id=2554, type=Mul]; +"2555 Sub_2043" [id=2555, type=Sub]; +"2556 Shape_nncf_2364" [id=2556, type=Shape]; +"2557 Flatten_nncf_2365" [id=2557, type=Flatten]; +"2558 Softmax_2044" [id=2558, type=Softmax]; +"2559 Reshape_nncf_2367" [id=2559, type=Reshape]; +"2560 MatMul_2045" [id=2560, type=MatMul]; +"2561 QuantizeLinear_2502_1" [id=2561, type=QuantizeLinear]; +"2562 DequantizeLinear_2502_1" [id=2562, type=DequantizeLinear]; +"2563 Transpose_2046" [id=2563, type=Transpose]; +"2564 Shape_2047" [id=2564, type=Shape]; +"2565 Constant_2048" [id=2565, type=Constant]; +"2566 Gather_2049" [id=2566, type=Gather]; +"2567 Shape_2050" [id=2567, type=Shape]; +"2568 Constant_2051" [id=2568, type=Constant]; +"2569 Gather_2052" [id=2569, type=Gather]; +"2570 Shape_2053" [id=2570, type=Shape]; +"2571 Constant_2054" [id=2571, type=Constant]; +"2572 Gather_2055" [id=2572, type=Gather]; +"2573 Shape_2056" [id=2573, type=Shape]; +"2574 Constant_2057" [id=2574, type=Constant]; +"2575 Gather_2058" [id=2575, type=Gather]; +"2576 Mul_2059" [id=2576, type=Mul]; +"2577 Constant_nncf_2383" [id=2577, type=Constant]; +"2578 Unsqueeze_2060" [id=2578, type=Unsqueeze]; +"2579 Constant_nncf_2385" [id=2579, type=Constant]; +"2580 Unsqueeze_2061" [id=2580, type=Unsqueeze]; +"2581 Constant_nncf_2387" [id=2581, type=Constant]; +"2582 Unsqueeze_2062" [id=2582, type=Unsqueeze]; +"2583 Concat_2063" [id=2583, type=Concat]; +"2584 Reshape_2064" [id=2584, type=Reshape]; +"2585 Shape_2065" [id=2585, type=Shape]; +"2586 Constant_2066" [id=2586, type=Constant]; +"2587 Gather_2067" [id=2587, type=Gather]; +"2588 Shape_2068" [id=2588, type=Shape]; +"2589 Constant_2069" [id=2589, type=Constant]; +"2590 Gather_2070" [id=2590, type=Gather]; +"2591 Shape_2071" [id=2591, type=Shape]; +"2592 Constant_2072" [id=2592, type=Constant]; +"2593 Gather_2073" [id=2593, type=Gather]; +"2594 Constant_nncf_2400" [id=2594, type=Constant]; +"2595 Unsqueeze_2074" [id=2595, type=Unsqueeze]; +"2596 Concat_2075" [id=2596, type=Concat]; +"2597 Reshape_2076" [id=2597, type=Reshape]; +"2598 QuantizeLinear_h.9.attn.c_proj.weight_1" [id=2598, type=QuantizeLinear]; +"2599 DequantizeLinear_h.9.attn.c_proj.weight_1" [id=2599, type=DequantizeLinear]; +"2600 Gemm_2077" [id=2600, type=Gemm]; +"2601 Constant_nncf_2405" [id=2601, type=Constant]; +"2602 Unsqueeze_2078" [id=2602, type=Unsqueeze]; +"2603 Constant_nncf_2407" [id=2603, type=Constant]; +"2604 Unsqueeze_2079" [id=2604, type=Unsqueeze]; +"2605 Concat_2080" [id=2605, type=Concat]; +"2606 Reshape_2081" [id=2606, type=Reshape]; +"2607 Add_2082" [id=2607, type=Add]; +"2608 ReduceMean_2083" [id=2608, type=ReduceMean]; +"2609 Sub_2084" [id=2609, type=Sub]; +"2610 Constant_2085" [id=2610, type=Constant]; +"2611 Pow_2086" [id=2611, type=Pow]; +"2612 ReduceMean_2087" [id=2612, type=ReduceMean]; +"2613 Constant_2088" [id=2613, type=Constant]; +"2614 Add_2089" [id=2614, type=Add]; +"2615 Sqrt_2090" [id=2615, type=Sqrt]; +"2616 Div_2091" [id=2616, type=Div]; +"2617 Mul_2092" [id=2617, type=Mul]; +"2618 Add_2093" [id=2618, type=Add]; +"2619 QuantizeLinear_2554_1" [id=2619, type=QuantizeLinear]; +"2620 DequantizeLinear_2554_1" [id=2620, type=DequantizeLinear]; +"2621 Shape_2094" [id=2621, type=Shape]; +"2622 Constant_2095" [id=2622, type=Constant]; +"2623 Gather_2096" [id=2623, type=Gather]; +"2624 Shape_2097" [id=2624, type=Shape]; +"2625 Constant_2098" [id=2625, 
type=Constant]; +"2626 Gather_2099" [id=2626, type=Gather]; +"2627 Shape_2100" [id=2627, type=Shape]; +"2628 Constant_2101" [id=2628, type=Constant]; +"2629 Gather_2102" [id=2629, type=Gather]; +"2630 Constant_nncf_2432" [id=2630, type=Constant]; +"2631 Unsqueeze_2103" [id=2631, type=Unsqueeze]; +"2632 Concat_2104" [id=2632, type=Concat]; +"2633 Reshape_2105" [id=2633, type=Reshape]; +"2634 QuantizeLinear_h.9.mlp.c_fc.weight_1" [id=2634, type=QuantizeLinear]; +"2635 DequantizeLinear_h.9.mlp.c_fc.weight_1" [id=2635, type=DequantizeLinear]; +"2636 Gemm_2106" [id=2636, type=Gemm]; +"2637 Constant_nncf_2437" [id=2637, type=Constant]; +"2638 Unsqueeze_2107" [id=2638, type=Unsqueeze]; +"2639 Constant_nncf_2439" [id=2639, type=Constant]; +"2640 Unsqueeze_2108" [id=2640, type=Unsqueeze]; +"2641 Concat_2109" [id=2641, type=Concat]; +"2642 Reshape_2110" [id=2642, type=Reshape]; +"2643 Constant_2111" [id=2643, type=Constant]; +"2644 Mul_2112" [id=2644, type=Mul]; +"2645 Constant_2113" [id=2645, type=Constant]; +"2646 Pow_2114" [id=2646, type=Pow]; +"2647 Constant_2115" [id=2647, type=Constant]; +"2648 Mul_2116" [id=2648, type=Mul]; +"2649 Add_2117" [id=2649, type=Add]; +"2650 Constant_2118" [id=2650, type=Constant]; +"2651 Mul_2119" [id=2651, type=Mul]; +"2652 Tanh_2120" [id=2652, type=Tanh]; +"2653 Constant_2121" [id=2653, type=Constant]; +"2654 Add_2122" [id=2654, type=Add]; +"2655 Mul_2123" [id=2655, type=Mul]; +"2656 QuantizeLinear_2588_1" [id=2656, type=QuantizeLinear]; +"2657 DequantizeLinear_2588_1" [id=2657, type=DequantizeLinear]; +"2658 Shape_2124" [id=2658, type=Shape]; +"2659 Constant_2125" [id=2659, type=Constant]; +"2660 Gather_2126" [id=2660, type=Gather]; +"2661 Shape_2127" [id=2661, type=Shape]; +"2662 Constant_2128" [id=2662, type=Constant]; +"2663 Gather_2129" [id=2663, type=Gather]; +"2664 Shape_2130" [id=2664, type=Shape]; +"2665 Constant_2131" [id=2665, type=Constant]; +"2666 Gather_2132" [id=2666, type=Gather]; +"2667 Constant_nncf_2465" [id=2667, type=Constant]; +"2668 Unsqueeze_2133" [id=2668, type=Unsqueeze]; +"2669 Concat_2134" [id=2669, type=Concat]; +"2670 Reshape_2135" [id=2670, type=Reshape]; +"2671 QuantizeLinear_h.9.mlp.c_proj.weight_1" [id=2671, type=QuantizeLinear]; +"2672 DequantizeLinear_h.9.mlp.c_proj.weight_1" [id=2672, type=DequantizeLinear]; +"2673 Gemm_2136" [id=2673, type=Gemm]; +"2674 Constant_nncf_2470" [id=2674, type=Constant]; +"2675 Unsqueeze_2137" [id=2675, type=Unsqueeze]; +"2676 Constant_nncf_2472" [id=2676, type=Constant]; +"2677 Unsqueeze_2138" [id=2677, type=Unsqueeze]; +"2678 Concat_2139" [id=2678, type=Concat]; +"2679 Reshape_2140" [id=2679, type=Reshape]; +"2680 Add_2141" [id=2680, type=Add]; +"2681 ReduceMean_2142" [id=2681, type=ReduceMean]; +"2682 Sub_2143" [id=2682, type=Sub]; +"2683 Constant_2144" [id=2683, type=Constant]; +"2684 Pow_2145" [id=2684, type=Pow]; +"2685 ReduceMean_2146" [id=2685, type=ReduceMean]; +"2686 Constant_2147" [id=2686, type=Constant]; +"2687 Add_2148" [id=2687, type=Add]; +"2688 Sqrt_2149" [id=2688, type=Sqrt]; +"2689 Div_2150" [id=2689, type=Div]; +"2690 Mul_2151" [id=2690, type=Mul]; +"2691 Add_2152" [id=2691, type=Add]; +"2692 QuantizeLinear_2621_1" [id=2692, type=QuantizeLinear]; +"2693 DequantizeLinear_2621_1" [id=2693, type=DequantizeLinear]; +"2694 Shape_2153" [id=2694, type=Shape]; +"2695 Constant_2154" [id=2695, type=Constant]; +"2696 Gather_2155" [id=2696, type=Gather]; +"2697 Shape_2156" [id=2697, type=Shape]; +"2698 Constant_2157" [id=2698, type=Constant]; +"2699 Gather_2158" [id=2699, type=Gather]; 
+"2700 Shape_2159" [id=2700, type=Shape]; +"2701 Constant_2160" [id=2701, type=Constant]; +"2702 Gather_2161" [id=2702, type=Gather]; +"2703 Constant_nncf_2497" [id=2703, type=Constant]; +"2704 Unsqueeze_2162" [id=2704, type=Unsqueeze]; +"2705 Concat_2163" [id=2705, type=Concat]; +"2706 Reshape_2164" [id=2706, type=Reshape]; +"2707 QuantizeLinear_h.10.attn.c_attn.weight_1" [id=2707, type=QuantizeLinear]; +"2708 DequantizeLinear_h.10.attn.c_attn.weight_1" [id=2708, type=DequantizeLinear]; +"2709 Gemm_2165" [id=2709, type=Gemm]; +"2710 Constant_nncf_2502" [id=2710, type=Constant]; +"2711 Unsqueeze_2166" [id=2711, type=Unsqueeze]; +"2712 Constant_nncf_2504" [id=2712, type=Constant]; +"2713 Unsqueeze_2167" [id=2713, type=Unsqueeze]; +"2714 Concat_2168" [id=2714, type=Concat]; +"2715 Reshape_2169" [id=2715, type=Reshape]; +"2716 Constant_nncf_2508" [id=2716, type=Constant]; +"2717 Split_2170" [id=2717, type=Split]; +"2718 QuantizeLinear_query.21_1" [id=2718, type=QuantizeLinear]; +"2719 DequantizeLinear_query.21_1" [id=2719, type=DequantizeLinear]; +"2720 Shape_2171" [id=2720, type=Shape]; +"2721 Constant_2172" [id=2721, type=Constant]; +"2722 Gather_2173" [id=2722, type=Gather]; +"2723 Shape_2174" [id=2723, type=Shape]; +"2724 Constant_2175" [id=2724, type=Constant]; +"2725 Gather_2176" [id=2725, type=Gather]; +"2726 Shape_2177" [id=2726, type=Shape]; +"2727 Constant_2178" [id=2727, type=Constant]; +"2728 Gather_2179" [id=2728, type=Gather]; +"2729 Constant_2180" [id=2729, type=Constant]; +"2730 Div_2181" [id=2730, type=Div]; +"2731 Cast_2182" [id=2731, type=Cast]; +"2732 Cast_2183" [id=2732, type=Cast]; +"2733 Constant_nncf_2523" [id=2733, type=Constant]; +"2734 Unsqueeze_2184" [id=2734, type=Unsqueeze]; +"2735 Constant_nncf_2525" [id=2735, type=Constant]; +"2736 Unsqueeze_2185" [id=2736, type=Unsqueeze]; +"2737 Constant_nncf_2527" [id=2737, type=Constant]; +"2738 Unsqueeze_2186" [id=2738, type=Unsqueeze]; +"2739 Concat_2187" [id=2739, type=Concat]; +"2740 Reshape_2188" [id=2740, type=Reshape]; +"2741 Transpose_2189" [id=2741, type=Transpose]; +"2742 Shape_2190" [id=2742, type=Shape]; +"2743 Constant_2191" [id=2743, type=Constant]; +"2744 Gather_2192" [id=2744, type=Gather]; +"2745 Shape_2193" [id=2745, type=Shape]; +"2746 Constant_2194" [id=2746, type=Constant]; +"2747 Gather_2195" [id=2747, type=Gather]; +"2748 Shape_2196" [id=2748, type=Shape]; +"2749 Constant_2197" [id=2749, type=Constant]; +"2750 Gather_2198" [id=2750, type=Gather]; +"2751 Constant_2199" [id=2751, type=Constant]; +"2752 Div_2200" [id=2752, type=Div]; +"2753 Cast_2201" [id=2753, type=Cast]; +"2754 Cast_2202" [id=2754, type=Cast]; +"2755 Constant_nncf_2545" [id=2755, type=Constant]; +"2756 Unsqueeze_2203" [id=2756, type=Unsqueeze]; +"2757 Constant_nncf_2547" [id=2757, type=Constant]; +"2758 Unsqueeze_2204" [id=2758, type=Unsqueeze]; +"2759 Constant_nncf_2549" [id=2759, type=Constant]; +"2760 Unsqueeze_2205" [id=2760, type=Unsqueeze]; +"2761 Concat_2206" [id=2761, type=Concat]; +"2762 Reshape_2207" [id=2762, type=Reshape]; +"2763 QuantizeLinear_2686_1" [id=2763, type=QuantizeLinear]; +"2764 DequantizeLinear_2686_1" [id=2764, type=DequantizeLinear]; +"2765 Transpose_2208" [id=2765, type=Transpose]; +"2766 Shape_2209" [id=2766, type=Shape]; +"2767 Constant_2210" [id=2767, type=Constant]; +"2768 Gather_2211" [id=2768, type=Gather]; +"2769 Shape_2212" [id=2769, type=Shape]; +"2770 Constant_2213" [id=2770, type=Constant]; +"2771 Gather_2214" [id=2771, type=Gather]; +"2772 Shape_2215" [id=2772, type=Shape]; +"2773 Constant_2216" 
[id=2773, type=Constant]; +"2774 Gather_2217" [id=2774, type=Gather]; +"2775 Constant_2218" [id=2775, type=Constant]; +"2776 Div_2219" [id=2776, type=Div]; +"2777 Cast_2220" [id=2777, type=Cast]; +"2778 Cast_2221" [id=2778, type=Cast]; +"2779 Constant_nncf_2567" [id=2779, type=Constant]; +"2780 Unsqueeze_2222" [id=2780, type=Unsqueeze]; +"2781 Constant_nncf_2569" [id=2781, type=Constant]; +"2782 Unsqueeze_2223" [id=2782, type=Unsqueeze]; +"2783 Constant_nncf_2571" [id=2783, type=Constant]; +"2784 Unsqueeze_2224" [id=2784, type=Unsqueeze]; +"2785 Concat_2225" [id=2785, type=Concat]; +"2786 Reshape_2226" [id=2786, type=Reshape]; +"2787 Transpose_2227" [id=2787, type=Transpose]; +"2788 Transpose_2228" [id=2788, type=Transpose]; +"2789 Constant_nncf_2577" [id=2789, type=Constant]; +"2790 Unsqueeze_2229" [id=2790, type=Unsqueeze]; +"2791 Constant_nncf_2579" [id=2791, type=Constant]; +"2792 Unsqueeze_2230" [id=2792, type=Unsqueeze]; +"2793 Concat_2231" [id=2793, type=Concat]; +"2794 MatMul_2232" [id=2794, type=MatMul]; +"2795 Constant_2233" [id=2795, type=Constant]; +"2796 Div_2234" [id=2796, type=Div]; +"2797 Shape_2235" [id=2797, type=Shape]; +"2798 Constant_2236" [id=2798, type=Constant]; +"2799 Gather_2237" [id=2799, type=Gather]; +"2800 Shape_2238" [id=2800, type=Shape]; +"2801 Constant_2239" [id=2801, type=Constant]; +"2802 Gather_2240" [id=2802, type=Gather]; +"2803 Sub_2241" [id=2803, type=Sub]; +"2804 Constant_nncf_2592" [id=2804, type=Constant]; +"2805 Unsqueeze_2242" [id=2805, type=Unsqueeze]; +"2806 Constant_nncf_2594" [id=2806, type=Constant]; +"2807 Unsqueeze_2243" [id=2807, type=Unsqueeze]; +"2808 Constant_2244" [id=2808, type=Constant]; +"2809 Slice_2245" [id=2809, type=Slice]; +"2810 Constant_nncf_2598" [id=2810, type=Constant]; +"2811 Unsqueeze_2246" [id=2811, type=Unsqueeze]; +"2812 Constant_2247" [id=2812, type=Constant]; +"2813 Slice_2248" [id=2813, type=Slice]; +"2814 Mul_2249" [id=2814, type=Mul]; +"2815 Constant_2250" [id=2815, type=Constant]; +"2816 Sub_2251" [id=2816, type=Sub]; +"2817 Constant_2252" [id=2817, type=Constant]; +"2818 Mul_2253" [id=2818, type=Mul]; +"2819 Sub_2254" [id=2819, type=Sub]; +"2820 Shape_nncf_2608" [id=2820, type=Shape]; +"2821 Flatten_nncf_2609" [id=2821, type=Flatten]; +"2822 Softmax_2255" [id=2822, type=Softmax]; +"2823 Reshape_nncf_2611" [id=2823, type=Reshape]; +"2824 MatMul_2256" [id=2824, type=MatMul]; +"2825 QuantizeLinear_2743_1" [id=2825, type=QuantizeLinear]; +"2826 DequantizeLinear_2743_1" [id=2826, type=DequantizeLinear]; +"2827 Transpose_2257" [id=2827, type=Transpose]; +"2828 Shape_2258" [id=2828, type=Shape]; +"2829 Constant_2259" [id=2829, type=Constant]; +"2830 Gather_2260" [id=2830, type=Gather]; +"2831 Shape_2261" [id=2831, type=Shape]; +"2832 Constant_2262" [id=2832, type=Constant]; +"2833 Gather_2263" [id=2833, type=Gather]; +"2834 Shape_2264" [id=2834, type=Shape]; +"2835 Constant_2265" [id=2835, type=Constant]; +"2836 Gather_2266" [id=2836, type=Gather]; +"2837 Shape_2267" [id=2837, type=Shape]; +"2838 Constant_2268" [id=2838, type=Constant]; +"2839 Gather_2269" [id=2839, type=Gather]; +"2840 Mul_2270" [id=2840, type=Mul]; +"2841 Constant_nncf_2627" [id=2841, type=Constant]; +"2842 Unsqueeze_2271" [id=2842, type=Unsqueeze]; +"2843 Constant_nncf_2629" [id=2843, type=Constant]; +"2844 Unsqueeze_2272" [id=2844, type=Unsqueeze]; +"2845 Constant_nncf_2631" [id=2845, type=Constant]; +"2846 Unsqueeze_2273" [id=2846, type=Unsqueeze]; +"2847 Concat_2274" [id=2847, type=Concat]; +"2848 Reshape_2275" [id=2848, type=Reshape]; +"2849 
Shape_2276" [id=2849, type=Shape]; +"2850 Constant_2277" [id=2850, type=Constant]; +"2851 Gather_2278" [id=2851, type=Gather]; +"2852 Shape_2279" [id=2852, type=Shape]; +"2853 Constant_2280" [id=2853, type=Constant]; +"2854 Gather_2281" [id=2854, type=Gather]; +"2855 Shape_2282" [id=2855, type=Shape]; +"2856 Constant_2283" [id=2856, type=Constant]; +"2857 Gather_2284" [id=2857, type=Gather]; +"2858 Constant_nncf_2644" [id=2858, type=Constant]; +"2859 Unsqueeze_2285" [id=2859, type=Unsqueeze]; +"2860 Concat_2286" [id=2860, type=Concat]; +"2861 Reshape_2287" [id=2861, type=Reshape]; +"2862 QuantizeLinear_h.10.attn.c_proj.weight_1" [id=2862, type=QuantizeLinear]; +"2863 DequantizeLinear_h.10.attn.c_proj.weight_1" [id=2863, type=DequantizeLinear]; +"2864 Gemm_2288" [id=2864, type=Gemm]; +"2865 Constant_nncf_2649" [id=2865, type=Constant]; +"2866 Unsqueeze_2289" [id=2866, type=Unsqueeze]; +"2867 Constant_nncf_2651" [id=2867, type=Constant]; +"2868 Unsqueeze_2290" [id=2868, type=Unsqueeze]; +"2869 Concat_2291" [id=2869, type=Concat]; +"2870 Reshape_2292" [id=2870, type=Reshape]; +"2871 Add_2293" [id=2871, type=Add]; +"2872 ReduceMean_2294" [id=2872, type=ReduceMean]; +"2873 Sub_2295" [id=2873, type=Sub]; +"2874 Constant_2296" [id=2874, type=Constant]; +"2875 Pow_2297" [id=2875, type=Pow]; +"2876 ReduceMean_2298" [id=2876, type=ReduceMean]; +"2877 Constant_2299" [id=2877, type=Constant]; +"2878 Add_2300" [id=2878, type=Add]; +"2879 Sqrt_2301" [id=2879, type=Sqrt]; +"2880 Div_2302" [id=2880, type=Div]; +"2881 Mul_2303" [id=2881, type=Mul]; +"2882 Add_2304" [id=2882, type=Add]; +"2883 QuantizeLinear_2795_1" [id=2883, type=QuantizeLinear]; +"2884 DequantizeLinear_2795_1" [id=2884, type=DequantizeLinear]; +"2885 Shape_2305" [id=2885, type=Shape]; +"2886 Constant_2306" [id=2886, type=Constant]; +"2887 Gather_2307" [id=2887, type=Gather]; +"2888 Shape_2308" [id=2888, type=Shape]; +"2889 Constant_2309" [id=2889, type=Constant]; +"2890 Gather_2310" [id=2890, type=Gather]; +"2891 Shape_2311" [id=2891, type=Shape]; +"2892 Constant_2312" [id=2892, type=Constant]; +"2893 Gather_2313" [id=2893, type=Gather]; +"2894 Constant_nncf_2676" [id=2894, type=Constant]; +"2895 Unsqueeze_2314" [id=2895, type=Unsqueeze]; +"2896 Concat_2315" [id=2896, type=Concat]; +"2897 Reshape_2316" [id=2897, type=Reshape]; +"2898 QuantizeLinear_h.10.mlp.c_fc.weight_1" [id=2898, type=QuantizeLinear]; +"2899 DequantizeLinear_h.10.mlp.c_fc.weight_1" [id=2899, type=DequantizeLinear]; +"2900 Gemm_2317" [id=2900, type=Gemm]; +"2901 Constant_nncf_2681" [id=2901, type=Constant]; +"2902 Unsqueeze_2318" [id=2902, type=Unsqueeze]; +"2903 Constant_nncf_2683" [id=2903, type=Constant]; +"2904 Unsqueeze_2319" [id=2904, type=Unsqueeze]; +"2905 Concat_2320" [id=2905, type=Concat]; +"2906 Reshape_2321" [id=2906, type=Reshape]; +"2907 Constant_2322" [id=2907, type=Constant]; +"2908 Mul_2323" [id=2908, type=Mul]; +"2909 Constant_2324" [id=2909, type=Constant]; +"2910 Pow_2325" [id=2910, type=Pow]; +"2911 Constant_2326" [id=2911, type=Constant]; +"2912 Mul_2327" [id=2912, type=Mul]; +"2913 Add_2328" [id=2913, type=Add]; +"2914 Constant_2329" [id=2914, type=Constant]; +"2915 Mul_2330" [id=2915, type=Mul]; +"2916 Tanh_2331" [id=2916, type=Tanh]; +"2917 Constant_2332" [id=2917, type=Constant]; +"2918 Add_2333" [id=2918, type=Add]; +"2919 Mul_2334" [id=2919, type=Mul]; +"2920 QuantizeLinear_2829_1" [id=2920, type=QuantizeLinear]; +"2921 DequantizeLinear_2829_1" [id=2921, type=DequantizeLinear]; +"2922 Shape_2335" [id=2922, type=Shape]; +"2923 Constant_2336" 
[id=2923, type=Constant]; +"2924 Gather_2337" [id=2924, type=Gather]; +"2925 Shape_2338" [id=2925, type=Shape]; +"2926 Constant_2339" [id=2926, type=Constant]; +"2927 Gather_2340" [id=2927, type=Gather]; +"2928 Shape_2341" [id=2928, type=Shape]; +"2929 Constant_2342" [id=2929, type=Constant]; +"2930 Gather_2343" [id=2930, type=Gather]; +"2931 Constant_nncf_2709" [id=2931, type=Constant]; +"2932 Unsqueeze_2344" [id=2932, type=Unsqueeze]; +"2933 Concat_2345" [id=2933, type=Concat]; +"2934 Reshape_2346" [id=2934, type=Reshape]; +"2935 QuantizeLinear_h.10.mlp.c_proj.weight_1" [id=2935, type=QuantizeLinear]; +"2936 DequantizeLinear_h.10.mlp.c_proj.weight_1" [id=2936, type=DequantizeLinear]; +"2937 Gemm_2347" [id=2937, type=Gemm]; +"2938 Constant_nncf_2714" [id=2938, type=Constant]; +"2939 Unsqueeze_2348" [id=2939, type=Unsqueeze]; +"2940 Constant_nncf_2716" [id=2940, type=Constant]; +"2941 Unsqueeze_2349" [id=2941, type=Unsqueeze]; +"2942 Concat_2350" [id=2942, type=Concat]; +"2943 Reshape_2351" [id=2943, type=Reshape]; +"2944 Add_2352" [id=2944, type=Add]; +"2945 ReduceMean_2353" [id=2945, type=ReduceMean]; +"2946 Sub_2354" [id=2946, type=Sub]; +"2947 Constant_2355" [id=2947, type=Constant]; +"2948 Pow_2356" [id=2948, type=Pow]; +"2949 ReduceMean_2357" [id=2949, type=ReduceMean]; +"2950 Constant_2358" [id=2950, type=Constant]; +"2951 Add_2359" [id=2951, type=Add]; +"2952 Sqrt_2360" [id=2952, type=Sqrt]; +"2953 Div_2361" [id=2953, type=Div]; +"2954 Mul_2362" [id=2954, type=Mul]; +"2955 Add_2363" [id=2955, type=Add]; +"2956 QuantizeLinear_2862_1" [id=2956, type=QuantizeLinear]; +"2957 DequantizeLinear_2862_1" [id=2957, type=DequantizeLinear]; +"2958 Shape_2364" [id=2958, type=Shape]; +"2959 Constant_2365" [id=2959, type=Constant]; +"2960 Gather_2366" [id=2960, type=Gather]; +"2961 Shape_2367" [id=2961, type=Shape]; +"2962 Constant_2368" [id=2962, type=Constant]; +"2963 Gather_2369" [id=2963, type=Gather]; +"2964 Shape_2370" [id=2964, type=Shape]; +"2965 Constant_2371" [id=2965, type=Constant]; +"2966 Gather_2372" [id=2966, type=Gather]; +"2967 Constant_nncf_2741" [id=2967, type=Constant]; +"2968 Unsqueeze_2373" [id=2968, type=Unsqueeze]; +"2969 Concat_2374" [id=2969, type=Concat]; +"2970 Reshape_2375" [id=2970, type=Reshape]; +"2971 QuantizeLinear_h.11.attn.c_attn.weight_1" [id=2971, type=QuantizeLinear]; +"2972 DequantizeLinear_h.11.attn.c_attn.weight_1" [id=2972, type=DequantizeLinear]; +"2973 Gemm_2376" [id=2973, type=Gemm]; +"2974 Constant_nncf_2746" [id=2974, type=Constant]; +"2975 Unsqueeze_2377" [id=2975, type=Unsqueeze]; +"2976 Constant_nncf_2748" [id=2976, type=Constant]; +"2977 Unsqueeze_2378" [id=2977, type=Unsqueeze]; +"2978 Concat_2379" [id=2978, type=Concat]; +"2979 Reshape_2380" [id=2979, type=Reshape]; +"2980 Constant_nncf_2752" [id=2980, type=Constant]; +"2981 Split_2381" [id=2981, type=Split]; +"2982 QuantizeLinear_query.23_1" [id=2982, type=QuantizeLinear]; +"2983 DequantizeLinear_query.23_1" [id=2983, type=DequantizeLinear]; +"2984 Shape_2382" [id=2984, type=Shape]; +"2985 Constant_2383" [id=2985, type=Constant]; +"2986 Gather_2384" [id=2986, type=Gather]; +"2987 Shape_2385" [id=2987, type=Shape]; +"2988 Constant_2386" [id=2988, type=Constant]; +"2989 Gather_2387" [id=2989, type=Gather]; +"2990 Shape_2388" [id=2990, type=Shape]; +"2991 Constant_2389" [id=2991, type=Constant]; +"2992 Gather_2390" [id=2992, type=Gather]; +"2993 Constant_2391" [id=2993, type=Constant]; +"2994 Div_2392" [id=2994, type=Div]; +"2995 Cast_2393" [id=2995, type=Cast]; +"2996 Cast_2394" [id=2996, 
type=Cast]; +"2997 Constant_nncf_2767" [id=2997, type=Constant]; +"2998 Unsqueeze_2395" [id=2998, type=Unsqueeze]; +"2999 Constant_nncf_2769" [id=2999, type=Constant]; +"3000 Unsqueeze_2396" [id=3000, type=Unsqueeze]; +"3001 Constant_nncf_2771" [id=3001, type=Constant]; +"3002 Unsqueeze_2397" [id=3002, type=Unsqueeze]; +"3003 Concat_2398" [id=3003, type=Concat]; +"3004 Reshape_2399" [id=3004, type=Reshape]; +"3005 Transpose_2400" [id=3005, type=Transpose]; +"3006 Shape_2401" [id=3006, type=Shape]; +"3007 Constant_2402" [id=3007, type=Constant]; +"3008 Gather_2403" [id=3008, type=Gather]; +"3009 Shape_2404" [id=3009, type=Shape]; +"3010 Constant_2405" [id=3010, type=Constant]; +"3011 Gather_2406" [id=3011, type=Gather]; +"3012 Shape_2407" [id=3012, type=Shape]; +"3013 Constant_2408" [id=3013, type=Constant]; +"3014 Gather_2409" [id=3014, type=Gather]; +"3015 Constant_2410" [id=3015, type=Constant]; +"3016 Div_2411" [id=3016, type=Div]; +"3017 Cast_2412" [id=3017, type=Cast]; +"3018 Cast_2413" [id=3018, type=Cast]; +"3019 Constant_nncf_2789" [id=3019, type=Constant]; +"3020 Unsqueeze_2414" [id=3020, type=Unsqueeze]; +"3021 Constant_nncf_2791" [id=3021, type=Constant]; +"3022 Unsqueeze_2415" [id=3022, type=Unsqueeze]; +"3023 Constant_nncf_2793" [id=3023, type=Constant]; +"3024 Unsqueeze_2416" [id=3024, type=Unsqueeze]; +"3025 Concat_2417" [id=3025, type=Concat]; +"3026 Reshape_2418" [id=3026, type=Reshape]; +"3027 QuantizeLinear_2927_1" [id=3027, type=QuantizeLinear]; +"3028 DequantizeLinear_2927_1" [id=3028, type=DequantizeLinear]; +"3029 Transpose_2419" [id=3029, type=Transpose]; +"3030 Shape_2420" [id=3030, type=Shape]; +"3031 Constant_2421" [id=3031, type=Constant]; +"3032 Gather_2422" [id=3032, type=Gather]; +"3033 Shape_2423" [id=3033, type=Shape]; +"3034 Constant_2424" [id=3034, type=Constant]; +"3035 Gather_2425" [id=3035, type=Gather]; +"3036 Shape_2426" [id=3036, type=Shape]; +"3037 Constant_2427" [id=3037, type=Constant]; +"3038 Gather_2428" [id=3038, type=Gather]; +"3039 Constant_2429" [id=3039, type=Constant]; +"3040 Div_2430" [id=3040, type=Div]; +"3041 Cast_2431" [id=3041, type=Cast]; +"3042 Cast_2432" [id=3042, type=Cast]; +"3043 Constant_nncf_2811" [id=3043, type=Constant]; +"3044 Unsqueeze_2433" [id=3044, type=Unsqueeze]; +"3045 Constant_nncf_2813" [id=3045, type=Constant]; +"3046 Unsqueeze_2434" [id=3046, type=Unsqueeze]; +"3047 Constant_nncf_2815" [id=3047, type=Constant]; +"3048 Unsqueeze_2435" [id=3048, type=Unsqueeze]; +"3049 Concat_2436" [id=3049, type=Concat]; +"3050 Reshape_2437" [id=3050, type=Reshape]; +"3051 Transpose_2438" [id=3051, type=Transpose]; +"3052 Transpose_2439" [id=3052, type=Transpose]; +"3053 Constant_nncf_2821" [id=3053, type=Constant]; +"3054 Unsqueeze_2440" [id=3054, type=Unsqueeze]; +"3055 Constant_nncf_2823" [id=3055, type=Constant]; +"3056 Unsqueeze_2441" [id=3056, type=Unsqueeze]; +"3057 Concat_2442" [id=3057, type=Concat]; +"3058 MatMul_2443" [id=3058, type=MatMul]; +"3059 Constant_2444" [id=3059, type=Constant]; +"3060 Div_2445" [id=3060, type=Div]; +"3061 Shape_2446" [id=3061, type=Shape]; +"3062 Constant_2447" [id=3062, type=Constant]; +"3063 Gather_2448" [id=3063, type=Gather]; +"3064 Shape_2449" [id=3064, type=Shape]; +"3065 Constant_2450" [id=3065, type=Constant]; +"3066 Gather_2451" [id=3066, type=Gather]; +"3067 Sub_2452" [id=3067, type=Sub]; +"3068 Constant_nncf_2836" [id=3068, type=Constant]; +"3069 Unsqueeze_2453" [id=3069, type=Unsqueeze]; +"3070 Constant_nncf_2838" [id=3070, type=Constant]; +"3071 Unsqueeze_2454" [id=3071, 
type=Unsqueeze]; +"3072 Constant_2455" [id=3072, type=Constant]; +"3073 Slice_2456" [id=3073, type=Slice]; +"3074 Constant_nncf_2842" [id=3074, type=Constant]; +"3075 Unsqueeze_2457" [id=3075, type=Unsqueeze]; +"3076 Constant_2458" [id=3076, type=Constant]; +"3077 Slice_2459" [id=3077, type=Slice]; +"3078 Mul_2460" [id=3078, type=Mul]; +"3079 Constant_2461" [id=3079, type=Constant]; +"3080 Sub_2462" [id=3080, type=Sub]; +"3081 Constant_2463" [id=3081, type=Constant]; +"3082 Mul_2464" [id=3082, type=Mul]; +"3083 Sub_2465" [id=3083, type=Sub]; +"3084 Shape_nncf_2852" [id=3084, type=Shape]; +"3085 Flatten_nncf_2853" [id=3085, type=Flatten]; +"3086 Softmax_2466" [id=3086, type=Softmax]; +"3087 Reshape_nncf_2855" [id=3087, type=Reshape]; +"3088 MatMul_2467" [id=3088, type=MatMul]; +"3089 QuantizeLinear_2984_1" [id=3089, type=QuantizeLinear]; +"3090 DequantizeLinear_2984_1" [id=3090, type=DequantizeLinear]; +"3091 Transpose_2468" [id=3091, type=Transpose]; +"3092 Shape_2469" [id=3092, type=Shape]; +"3093 Constant_2470" [id=3093, type=Constant]; +"3094 Gather_2471" [id=3094, type=Gather]; +"3095 Shape_2472" [id=3095, type=Shape]; +"3096 Constant_2473" [id=3096, type=Constant]; +"3097 Gather_2474" [id=3097, type=Gather]; +"3098 Shape_2475" [id=3098, type=Shape]; +"3099 Constant_2476" [id=3099, type=Constant]; +"3100 Gather_2477" [id=3100, type=Gather]; +"3101 Shape_2478" [id=3101, type=Shape]; +"3102 Constant_2479" [id=3102, type=Constant]; +"3103 Gather_2480" [id=3103, type=Gather]; +"3104 Mul_2481" [id=3104, type=Mul]; +"3105 Constant_nncf_2871" [id=3105, type=Constant]; +"3106 Unsqueeze_2482" [id=3106, type=Unsqueeze]; +"3107 Constant_nncf_2873" [id=3107, type=Constant]; +"3108 Unsqueeze_2483" [id=3108, type=Unsqueeze]; +"3109 Constant_nncf_2875" [id=3109, type=Constant]; +"3110 Unsqueeze_2484" [id=3110, type=Unsqueeze]; +"3111 Concat_2485" [id=3111, type=Concat]; +"3112 Reshape_2486" [id=3112, type=Reshape]; +"3113 Shape_2487" [id=3113, type=Shape]; +"3114 Constant_2488" [id=3114, type=Constant]; +"3115 Gather_2489" [id=3115, type=Gather]; +"3116 Shape_2490" [id=3116, type=Shape]; +"3117 Constant_2491" [id=3117, type=Constant]; +"3118 Gather_2492" [id=3118, type=Gather]; +"3119 Shape_2493" [id=3119, type=Shape]; +"3120 Constant_2494" [id=3120, type=Constant]; +"3121 Gather_2495" [id=3121, type=Gather]; +"3122 Constant_nncf_2888" [id=3122, type=Constant]; +"3123 Unsqueeze_2496" [id=3123, type=Unsqueeze]; +"3124 Concat_2497" [id=3124, type=Concat]; +"3125 Reshape_2498" [id=3125, type=Reshape]; +"3126 QuantizeLinear_h.11.attn.c_proj.weight_1" [id=3126, type=QuantizeLinear]; +"3127 DequantizeLinear_h.11.attn.c_proj.weight_1" [id=3127, type=DequantizeLinear]; +"3128 Gemm_2499" [id=3128, type=Gemm]; +"3129 Constant_nncf_2893" [id=3129, type=Constant]; +"3130 Unsqueeze_2500" [id=3130, type=Unsqueeze]; +"3131 Constant_nncf_2895" [id=3131, type=Constant]; +"3132 Unsqueeze_2501" [id=3132, type=Unsqueeze]; +"3133 Concat_2502" [id=3133, type=Concat]; +"3134 Reshape_2503" [id=3134, type=Reshape]; +"3135 Add_2504" [id=3135, type=Add]; +"3136 ReduceMean_2505" [id=3136, type=ReduceMean]; +"3137 Sub_2506" [id=3137, type=Sub]; +"3138 Constant_2507" [id=3138, type=Constant]; +"3139 Pow_2508" [id=3139, type=Pow]; +"3140 ReduceMean_2509" [id=3140, type=ReduceMean]; +"3141 Constant_2510" [id=3141, type=Constant]; +"3142 Add_2511" [id=3142, type=Add]; +"3143 Sqrt_2512" [id=3143, type=Sqrt]; +"3144 Div_2513" [id=3144, type=Div]; +"3145 Mul_2514" [id=3145, type=Mul]; +"3146 Add_2515" [id=3146, type=Add]; +"3147 
QuantizeLinear_3036_1" [id=3147, type=QuantizeLinear]; +"3148 DequantizeLinear_3036_1" [id=3148, type=DequantizeLinear]; +"3149 Shape_2516" [id=3149, type=Shape]; +"3150 Constant_2517" [id=3150, type=Constant]; +"3151 Gather_2518" [id=3151, type=Gather]; +"3152 Shape_2519" [id=3152, type=Shape]; +"3153 Constant_2520" [id=3153, type=Constant]; +"3154 Gather_2521" [id=3154, type=Gather]; +"3155 Shape_2522" [id=3155, type=Shape]; +"3156 Constant_2523" [id=3156, type=Constant]; +"3157 Gather_2524" [id=3157, type=Gather]; +"3158 Constant_nncf_2920" [id=3158, type=Constant]; +"3159 Unsqueeze_2525" [id=3159, type=Unsqueeze]; +"3160 Concat_2526" [id=3160, type=Concat]; +"3161 Reshape_2527" [id=3161, type=Reshape]; +"3162 QuantizeLinear_h.11.mlp.c_fc.weight_1" [id=3162, type=QuantizeLinear]; +"3163 DequantizeLinear_h.11.mlp.c_fc.weight_1" [id=3163, type=DequantizeLinear]; +"3164 Gemm_2528" [id=3164, type=Gemm]; +"3165 Constant_nncf_2925" [id=3165, type=Constant]; +"3166 Unsqueeze_2529" [id=3166, type=Unsqueeze]; +"3167 Constant_nncf_2927" [id=3167, type=Constant]; +"3168 Unsqueeze_2530" [id=3168, type=Unsqueeze]; +"3169 Concat_2531" [id=3169, type=Concat]; +"3170 Reshape_2532" [id=3170, type=Reshape]; +"3171 Constant_2533" [id=3171, type=Constant]; +"3172 Mul_2534" [id=3172, type=Mul]; +"3173 Constant_2535" [id=3173, type=Constant]; +"3174 Pow_2536" [id=3174, type=Pow]; +"3175 Constant_2537" [id=3175, type=Constant]; +"3176 Mul_2538" [id=3176, type=Mul]; +"3177 Add_2539" [id=3177, type=Add]; +"3178 Constant_2540" [id=3178, type=Constant]; +"3179 Mul_2541" [id=3179, type=Mul]; +"3180 Tanh_2542" [id=3180, type=Tanh]; +"3181 Constant_2543" [id=3181, type=Constant]; +"3182 Add_2544" [id=3182, type=Add]; +"3183 Mul_2545" [id=3183, type=Mul]; +"3184 QuantizeLinear_3070_1" [id=3184, type=QuantizeLinear]; +"3185 DequantizeLinear_3070_1" [id=3185, type=DequantizeLinear]; +"3186 Shape_2546" [id=3186, type=Shape]; +"3187 Constant_2547" [id=3187, type=Constant]; +"3188 Gather_2548" [id=3188, type=Gather]; +"3189 Shape_2549" [id=3189, type=Shape]; +"3190 Constant_2550" [id=3190, type=Constant]; +"3191 Gather_2551" [id=3191, type=Gather]; +"3192 Shape_2552" [id=3192, type=Shape]; +"3193 Constant_2553" [id=3193, type=Constant]; +"3194 Gather_2554" [id=3194, type=Gather]; +"3195 Constant_nncf_2953" [id=3195, type=Constant]; +"3196 Unsqueeze_2555" [id=3196, type=Unsqueeze]; +"3197 Concat_2556" [id=3197, type=Concat]; +"3198 Reshape_2557" [id=3198, type=Reshape]; +"3199 QuantizeLinear_h.11.mlp.c_proj.weight_1" [id=3199, type=QuantizeLinear]; +"3200 DequantizeLinear_h.11.mlp.c_proj.weight_1" [id=3200, type=DequantizeLinear]; +"3201 Gemm_2558" [id=3201, type=Gemm]; +"3202 Constant_nncf_2958" [id=3202, type=Constant]; +"3203 Unsqueeze_2559" [id=3203, type=Unsqueeze]; +"3204 Constant_nncf_2960" [id=3204, type=Constant]; +"3205 Unsqueeze_2560" [id=3205, type=Unsqueeze]; +"3206 Concat_2561" [id=3206, type=Concat]; +"3207 Reshape_2562" [id=3207, type=Reshape]; +"3208 Add_2563" [id=3208, type=Add]; +"3209 ReduceMean_2564" [id=3209, type=ReduceMean]; +"3210 Sub_2565" [id=3210, type=Sub]; +"3211 Constant_2566" [id=3211, type=Constant]; +"3212 Pow_2567" [id=3212, type=Pow]; +"3213 ReduceMean_2568" [id=3213, type=ReduceMean]; +"3214 Constant_2569" [id=3214, type=Constant]; +"3215 Add_2570" [id=3215, type=Add]; +"3216 Sqrt_2571" [id=3216, type=Sqrt]; +"3217 Div_2572" [id=3217, type=Div]; +"3218 Mul_2573" [id=3218, type=Mul]; +"3219 Add_2574" [id=3219, type=Add]; +"3220 Constant_nncf_2976" [id=3220, type=Constant]; +"3221 
Unsqueeze_2575" [id=3221, type=Unsqueeze]; +"3222 Constant_nncf_2978" [id=3222, type=Constant]; +"3223 Unsqueeze_2576" [id=3223, type=Unsqueeze]; +"3224 Constant_nncf_2980" [id=3224, type=Constant]; +"3225 Unsqueeze_2577" [id=3225, type=Unsqueeze]; +"3226 Constant_nncf_2982" [id=3226, type=Constant]; +"3227 Unsqueeze_2578" [id=3227, type=Unsqueeze]; +"3228 Concat_2579" [id=3228, type=Concat]; +"3229 Reshape_2580" [id=3229, type=Reshape]; +"3230 nncf_model_input_0" [id=3230, type=nncf_model_input]; +"3231 nncf_model_output_0" [id=3231, type=nncf_model_output]; +"3232 nncf_model_output_1" [id=3232, type=nncf_model_output]; +"3233 nncf_model_output_2" [id=3233, type=nncf_model_output]; +"3234 nncf_model_output_3" [id=3234, type=nncf_model_output]; +"3235 nncf_model_output_4" [id=3235, type=nncf_model_output]; +"3236 nncf_model_output_5" [id=3236, type=nncf_model_output]; +"3237 nncf_model_output_6" [id=3237, type=nncf_model_output]; +"3238 nncf_model_output_7" [id=3238, type=nncf_model_output]; +"3239 nncf_model_output_8" [id=3239, type=nncf_model_output]; +"3240 nncf_model_output_9" [id=3240, type=nncf_model_output]; +"3241 nncf_model_output_10" [id=3241, type=nncf_model_output]; +"3242 nncf_model_output_11" [id=3242, type=nncf_model_output]; +"3243 nncf_model_output_12" [id=3243, type=nncf_model_output]; "0 Shape_0" -> "2 Gather_2" [label="[3]", style=dashed]; "1 Constant_1" -> "2 Gather_2" [label="[]", style=dashed]; -"2 Gather_2" -> "2819 Unsqueeze_2575" [label="[]", style=dashed]; +"2 Gather_2" -> "3221 Unsqueeze_2575" [label="[]", style=dashed]; "3 Shape_3" -> "5 Gather_5" [label="[3]", style=dashed]; "4 Constant_4" -> "5 Gather_5" [label="[]", style=dashed]; -"5 Gather_5" -> "2820 Unsqueeze_2576" [label="[]", style=dashed]; +"5 Gather_5" -> "3223 Unsqueeze_2576" [label="[]", style=dashed]; "6 Shape_6" -> "8 Gather_8" [label="[3]", style=dashed]; "7 Constant_7" -> "8 Gather_8" [label="[]", style=dashed]; -"8 Gather_8" -> "9 Unsqueeze_9" [label="[]", style=dashed]; -"8 Gather_8" -> "12 Unsqueeze_12" [label="[]", style=dashed]; -"8 Gather_8" -> "23 Unsqueeze_23" [label="[]", style=dashed]; -"8 Gather_8" -> "2821 Unsqueeze_2577" [label="[]", style=dashed]; -"9 Unsqueeze_9" -> "10 Concat_10" [label="[1]", style=dashed]; -"10 Concat_10" -> "11 Reshape_11" [label="[2]", style=dashed]; -"11 Reshape_11" -> "28 Gather_26" [label="[]", style=dashed]; -"12 Unsqueeze_12" -> "13 Sub_13" [label="[1]", style=dashed]; -"13 Sub_13" -> "14 Div_14" [label="[1]", style=dashed]; -"14 Div_14" -> "15 ConstantOfShape_15" [label="[1]", style=dashed]; -"15 ConstantOfShape_15" -> "16 NonZero_16" [label="[-1]", style=dashed]; -"16 NonZero_16" -> "17 Transpose_17" [label="[1, -1]", style=dashed]; -"17 Transpose_17" -> "18 Squeeze_18" [label="[-1, 1]", style=dashed]; -"18 Squeeze_18" -> "19 Mul_19" [label="[-1]", style=dashed]; -"19 Mul_19" -> "20 Add_20" [label="[-1]", style=dashed]; -"20 Add_20" -> "21 Cast_21" [label="[-1]", style=dashed]; -"21 Cast_21" -> "22 Unsqueeze_22" [label="[-1]", style=dashed]; -"22 Unsqueeze_22" -> "25 Reshape_25" [label="[1, -1]", style=dashed]; -"23 Unsqueeze_23" -> "24 Concat_24" [label="[1]", style=dashed]; -"24 Concat_24" -> "25 Reshape_25" [label="[2]", style=dashed]; -"25 Reshape_25" -> "31 Gather_27" [label="[]", style=dashed]; -"26 QuantizeLinear_wte.weight_1" -> "27 DequantizeLinear_wte.weight_1" [label="[50257, 768]", style=dashed]; -"27 DequantizeLinear_wte.weight_1" -> "28 Gather_26" [label="[50257, 768]", style=solid]; -"28 Gather_26" -> "32 Add_28" [label="[]", 
style=solid]; -"29 QuantizeLinear_wpe.weight_1" -> "30 DequantizeLinear_wpe.weight_1" [label="[1024, 768]", style=dashed]; -"30 DequantizeLinear_wpe.weight_1" -> "31 Gather_27" [label="[1024, 768]", style=solid]; -"31 Gather_27" -> "32 Add_28" [label="[]", style=solid]; -"32 Add_28" -> "33 Shape_29" [label="[]", style=solid]; -"32 Add_28" -> "36 ReduceMean_32" [label="[]", style=solid]; -"32 Add_28" -> "37 Sub_33" [label="[]", style=solid]; -"32 Add_28" -> "199 Add_183" [label="[]", style=solid]; -"33 Shape_29" -> "35 Gather_31" [label="[-1]", style=dashed]; -"34 Constant_30" -> "35 Gather_31" [label="[]", style=dashed]; -"35 Gather_31" -> "2822 Unsqueeze_2578" [label="[]", style=dashed]; -"36 ReduceMean_32" -> "37 Sub_33" [label="[]", style=solid]; -"37 Sub_33" -> "39 Pow_35" [label="[]", style=solid]; -"37 Sub_33" -> "44 Div_40" [label="[]", style=solid]; -"38 Constant_34" -> "39 Pow_35" [label="[]", style=solid]; -"39 Pow_35" -> "40 ReduceMean_36" [label="[]", style=solid]; -"40 ReduceMean_36" -> "42 Add_38" [label="[]", style=solid]; -"41 Constant_37" -> "42 Add_38" [label="[]", style=solid]; -"42 Add_38" -> "43 Sqrt_39" [label="[]", style=solid]; -"43 Sqrt_39" -> "44 Div_40" [label="[]", style=solid]; -"44 Div_40" -> "45 Mul_41" [label="[]", style=solid]; -"45 Mul_41" -> "46 Add_42" [label="[]", style=solid]; -"46 Add_42" -> "47 QuantizeLinear_211_1" [label="[]", style=solid]; -"47 QuantizeLinear_211_1" -> "48 DequantizeLinear_211_1" [label="[]", style=dashed]; -"48 DequantizeLinear_211_1" -> "49 Shape_43" [label="[]", style=solid]; -"48 DequantizeLinear_211_1" -> "52 Shape_46" [label="[]", style=solid]; -"48 DequantizeLinear_211_1" -> "55 Shape_49" [label="[]", style=solid]; -"48 DequantizeLinear_211_1" -> "60 Reshape_54" [label="[]", style=solid]; -"49 Shape_43" -> "51 Gather_45" [label="[-1]", style=dashed]; -"50 Constant_44" -> "51 Gather_45" [label="[]", style=dashed]; -"51 Gather_45" -> "64 Unsqueeze_56" [label="[]", style=dashed]; -"52 Shape_46" -> "54 Gather_48" [label="[-1]", style=dashed]; -"53 Constant_47" -> "54 Gather_48" [label="[]", style=dashed]; -"54 Gather_48" -> "65 Unsqueeze_57" [label="[]", style=dashed]; -"55 Shape_49" -> "57 Gather_51" [label="[-1]", style=dashed]; -"56 Constant_50" -> "57 Gather_51" [label="[]", style=dashed]; -"57 Gather_51" -> "58 Unsqueeze_52" [label="[]", style=dashed]; -"58 Unsqueeze_52" -> "59 Concat_53" [label="[1]", style=dashed]; -"59 Concat_53" -> "60 Reshape_54" [label="[2]", style=dashed]; -"60 Reshape_54" -> "63 Gemm_55" [label="[]", style=solid]; -"61 QuantizeLinear_h.0.attn.c_attn.weight_1" -> "62 DequantizeLinear_h.0.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; -"62 DequantizeLinear_h.0.attn.c_attn.weight_1" -> "63 Gemm_55" [label="[768, 2304]", style=solid]; -"63 Gemm_55" -> "67 Reshape_59" [label="[]", style=solid]; -"64 Unsqueeze_56" -> "66 Concat_58" [label="[1]", style=dashed]; -"65 Unsqueeze_57" -> "66 Concat_58" [label="[1]", style=dashed]; -"66 Concat_58" -> "67 Reshape_59" [label="[3]", style=dashed]; -"67 Reshape_59" -> "68 Split_60" [label="[]", style=solid]; -"68 Split_60" -> "69 QuantizeLinear_query.1_1" [label="[]", style=solid]; -"68 Split_60" -> "71 Shape_61" [label="[]", style=solid]; -"68 Split_60" -> "74 Shape_64" [label="[]", style=solid]; -"68 Split_60" -> "77 Shape_67" [label="[]", style=solid]; -"68 Split_60" -> "90 Shape_80" [label="[]", style=solid]; -"68 Split_60" -> "93 Shape_83" [label="[]", style=solid]; -"68 Split_60" -> "96 Shape_86" [label="[]", style=solid]; -"68 Split_60" -> "107 
Reshape_97" [label="[]", style=solid]; -"68 Split_60" -> "111 Shape_99" [label="[]", style=solid]; -"68 Split_60" -> "114 Shape_102" [label="[]", style=solid]; -"68 Split_60" -> "117 Shape_105" [label="[]", style=solid]; -"68 Split_60" -> "128 Reshape_116" [label="[]", style=solid]; -"69 QuantizeLinear_query.1_1" -> "70 DequantizeLinear_query.1_1" [label="[]", style=dashed]; -"70 DequantizeLinear_query.1_1" -> "88 Reshape_78" [label="[]", style=solid]; -"71 Shape_61" -> "73 Gather_63" [label="[-1]", style=dashed]; -"72 Constant_62" -> "73 Gather_63" [label="[]", style=dashed]; -"73 Gather_63" -> "84 Unsqueeze_74" [label="[]", style=dashed]; -"74 Shape_64" -> "76 Gather_66" [label="[-1]", style=dashed]; -"75 Constant_65" -> "76 Gather_66" [label="[]", style=dashed]; -"76 Gather_66" -> "85 Unsqueeze_75" [label="[]", style=dashed]; -"77 Shape_67" -> "79 Gather_69" [label="[-1]", style=dashed]; -"78 Constant_68" -> "79 Gather_69" [label="[]", style=dashed]; -"79 Gather_69" -> "81 Div_71" [label="[]", style=dashed]; -"80 Constant_70" -> "81 Div_71" [label="[]", style=dashed]; -"81 Div_71" -> "82 Cast_72" [label="[]", style=dashed]; -"82 Cast_72" -> "83 Cast_73" [label="[]", style=dashed]; -"83 Cast_73" -> "86 Unsqueeze_76" [label="[]", style=dashed]; -"84 Unsqueeze_74" -> "87 Concat_77" [label="[1]", style=dashed]; -"85 Unsqueeze_75" -> "87 Concat_77" [label="[1]", style=dashed]; -"86 Unsqueeze_76" -> "87 Concat_77" [label="[1]", style=dashed]; -"87 Concat_77" -> "88 Reshape_78" [label="[4]", style=dashed]; -"88 Reshape_78" -> "89 Transpose_79" [label="[]", style=solid]; -"89 Transpose_79" -> "134 MatMul_122" [label="[]", style=solid]; -"90 Shape_80" -> "92 Gather_82" [label="[-1]", style=dashed]; -"91 Constant_81" -> "92 Gather_82" [label="[]", style=dashed]; -"92 Gather_82" -> "103 Unsqueeze_93" [label="[]", style=dashed]; -"93 Shape_83" -> "95 Gather_85" [label="[-1]", style=dashed]; -"94 Constant_84" -> "95 Gather_85" [label="[]", style=dashed]; -"95 Gather_85" -> "104 Unsqueeze_94" [label="[]", style=dashed]; -"96 Shape_86" -> "98 Gather_88" [label="[-1]", style=dashed]; -"97 Constant_87" -> "98 Gather_88" [label="[]", style=dashed]; -"98 Gather_88" -> "100 Div_90" [label="[]", style=dashed]; -"99 Constant_89" -> "100 Div_90" [label="[]", style=dashed]; -"100 Div_90" -> "101 Cast_91" [label="[]", style=dashed]; -"101 Cast_91" -> "102 Cast_92" [label="[]", style=dashed]; -"102 Cast_92" -> "105 Unsqueeze_95" [label="[]", style=dashed]; -"103 Unsqueeze_93" -> "106 Concat_96" [label="[1]", style=dashed]; -"104 Unsqueeze_94" -> "106 Concat_96" [label="[1]", style=dashed]; -"105 Unsqueeze_95" -> "106 Concat_96" [label="[1]", style=dashed]; -"106 Concat_96" -> "107 Reshape_97" [label="[4]", style=dashed]; -"107 Reshape_97" -> "108 QuantizeLinear_276_1" [label="[]", style=solid]; -"107 Reshape_97" -> "130 Transpose_118" [label="[]", style=solid]; -"108 QuantizeLinear_276_1" -> "109 DequantizeLinear_276_1" [label="[]", style=dashed]; -"109 DequantizeLinear_276_1" -> "110 Transpose_98" [label="[]", style=solid]; -"110 Transpose_98" -> "134 MatMul_122" [label="[]", style=solid]; -"111 Shape_99" -> "113 Gather_101" [label="[-1]", style=dashed]; -"112 Constant_100" -> "113 Gather_101" [label="[]", style=dashed]; -"113 Gather_101" -> "124 Unsqueeze_112" [label="[]", style=dashed]; -"114 Shape_102" -> "116 Gather_104" [label="[-1]", style=dashed]; -"115 Constant_103" -> "116 Gather_104" [label="[]", style=dashed]; -"116 Gather_104" -> "125 Unsqueeze_113" [label="[]", style=dashed]; -"117 Shape_105" -> 
"119 Gather_107" [label="[-1]", style=dashed]; -"118 Constant_106" -> "119 Gather_107" [label="[]", style=dashed]; -"119 Gather_107" -> "121 Div_109" [label="[]", style=dashed]; -"120 Constant_108" -> "121 Div_109" [label="[]", style=dashed]; -"121 Div_109" -> "122 Cast_110" [label="[]", style=dashed]; -"122 Cast_110" -> "123 Cast_111" [label="[]", style=dashed]; -"123 Cast_111" -> "126 Unsqueeze_114" [label="[]", style=dashed]; -"124 Unsqueeze_112" -> "127 Concat_115" [label="[1]", style=dashed]; -"125 Unsqueeze_113" -> "127 Concat_115" [label="[1]", style=dashed]; -"126 Unsqueeze_114" -> "127 Concat_115" [label="[1]", style=dashed]; -"127 Concat_115" -> "128 Reshape_116" [label="[4]", style=dashed]; -"128 Reshape_116" -> "129 Transpose_117" [label="[]", style=solid]; -"129 Transpose_117" -> "132 Unsqueeze_120" [label="[]", style=solid]; -"129 Transpose_117" -> "158 MatMul_146" [label="[]", style=solid]; -"130 Transpose_118" -> "131 Unsqueeze_119" [label="[]", style=solid]; -"131 Unsqueeze_119" -> "133 Concat_121" [label="[]", style=solid]; -"132 Unsqueeze_120" -> "133 Concat_121" [label="[]", style=solid]; -"133 Concat_121" -> "2827 nncf_model_output_1" [label="[2, 1, 12, 8, 64]", style=solid]; -"134 MatMul_122" -> "136 Div_124" [label="[]", style=solid]; -"135 Constant_123" -> "136 Div_124" [label="[]", style=solid]; -"136 Div_124" -> "137 Shape_125" [label="[]", style=solid]; -"136 Div_124" -> "140 Shape_128" [label="[]", style=solid]; -"136 Div_124" -> "151 Mul_139" [label="[]", style=solid]; -"137 Shape_125" -> "139 Gather_127" [label="[-1]", style=dashed]; -"138 Constant_126" -> "139 Gather_127" [label="[]", style=dashed]; -"139 Gather_127" -> "143 Sub_131" [label="[]", style=dashed]; -"140 Shape_128" -> "142 Gather_130" [label="[-1]", style=dashed]; -"141 Constant_129" -> "142 Gather_130" [label="[]", style=dashed]; -"142 Gather_130" -> "143 Sub_131" [label="[]", style=dashed]; -"142 Gather_130" -> "145 Unsqueeze_133" [label="[]", style=dashed]; -"142 Gather_130" -> "148 Unsqueeze_136" [label="[]", style=dashed]; -"143 Sub_131" -> "144 Unsqueeze_132" [label="[]", style=dashed]; -"144 Unsqueeze_132" -> "147 Slice_135" [label="[1]", style=dashed]; -"145 Unsqueeze_133" -> "147 Slice_135" [label="[1]", style=dashed]; -"146 Constant_134" -> "147 Slice_135" [label="[1]", style=dashed]; -"147 Slice_135" -> "150 Slice_138" [label="[]", style=solid]; -"148 Unsqueeze_136" -> "150 Slice_138" [label="[1]", style=dashed]; -"149 Constant_137" -> "150 Slice_138" [label="[1]", style=dashed]; -"150 Slice_138" -> "151 Mul_139" [label="[]", style=solid]; -"150 Slice_138" -> "153 Sub_141" [label="[]", style=solid]; -"151 Mul_139" -> "156 Sub_144" [label="[]", style=solid]; -"152 Constant_140" -> "153 Sub_141" [label="[]", style=solid]; -"153 Sub_141" -> "155 Mul_143" [label="[]", style=solid]; -"154 Constant_142" -> "155 Mul_143" [label="[]", style=solid]; -"155 Mul_143" -> "156 Sub_144" [label="[]", style=solid]; -"156 Sub_144" -> "157 Softmax_145" [label="[]", style=solid]; -"157 Softmax_145" -> "158 MatMul_146" [label="[]", style=solid]; -"158 MatMul_146" -> "159 QuantizeLinear_333_1" [label="[]", style=solid]; -"159 QuantizeLinear_333_1" -> "160 DequantizeLinear_333_1" [label="[]", style=dashed]; -"160 DequantizeLinear_333_1" -> "161 Transpose_147" [label="[]", style=solid]; -"161 Transpose_147" -> "162 Shape_148" [label="[]", style=solid]; -"161 Transpose_147" -> "165 Shape_151" [label="[]", style=solid]; -"161 Transpose_147" -> "168 Shape_154" [label="[]", style=solid]; -"161 Transpose_147" -> 
"171 Shape_157" [label="[]", style=solid]; -"161 Transpose_147" -> "179 Reshape_165" [label="[]", style=solid]; -"162 Shape_148" -> "164 Gather_150" [label="[-1]", style=dashed]; -"163 Constant_149" -> "164 Gather_150" [label="[]", style=dashed]; -"164 Gather_150" -> "175 Unsqueeze_161" [label="[]", style=dashed]; -"165 Shape_151" -> "167 Gather_153" [label="[-1]", style=dashed]; -"166 Constant_152" -> "167 Gather_153" [label="[]", style=dashed]; -"167 Gather_153" -> "176 Unsqueeze_162" [label="[]", style=dashed]; -"168 Shape_154" -> "170 Gather_156" [label="[-1]", style=dashed]; -"169 Constant_155" -> "170 Gather_156" [label="[]", style=dashed]; -"170 Gather_156" -> "174 Mul_160" [label="[]", style=dashed]; -"171 Shape_157" -> "173 Gather_159" [label="[-1]", style=dashed]; -"172 Constant_158" -> "173 Gather_159" [label="[]", style=dashed]; -"173 Gather_159" -> "174 Mul_160" [label="[]", style=dashed]; -"174 Mul_160" -> "177 Unsqueeze_163" [label="[]", style=dashed]; -"175 Unsqueeze_161" -> "178 Concat_164" [label="[1]", style=dashed]; -"176 Unsqueeze_162" -> "178 Concat_164" [label="[1]", style=dashed]; -"177 Unsqueeze_163" -> "178 Concat_164" [label="[1]", style=dashed]; -"178 Concat_164" -> "179 Reshape_165" [label="[3]", style=dashed]; -"179 Reshape_165" -> "180 Shape_166" [label="[]", style=solid]; -"179 Reshape_165" -> "183 Shape_169" [label="[]", style=solid]; -"179 Reshape_165" -> "186 Shape_172" [label="[]", style=solid]; -"179 Reshape_165" -> "191 Reshape_177" [label="[]", style=solid]; -"180 Shape_166" -> "182 Gather_168" [label="[-1]", style=dashed]; -"181 Constant_167" -> "182 Gather_168" [label="[]", style=dashed]; -"182 Gather_168" -> "195 Unsqueeze_179" [label="[]", style=dashed]; -"183 Shape_169" -> "185 Gather_171" [label="[-1]", style=dashed]; -"184 Constant_170" -> "185 Gather_171" [label="[]", style=dashed]; -"185 Gather_171" -> "196 Unsqueeze_180" [label="[]", style=dashed]; -"186 Shape_172" -> "188 Gather_174" [label="[-1]", style=dashed]; -"187 Constant_173" -> "188 Gather_174" [label="[]", style=dashed]; -"188 Gather_174" -> "189 Unsqueeze_175" [label="[]", style=dashed]; -"189 Unsqueeze_175" -> "190 Concat_176" [label="[1]", style=dashed]; -"190 Concat_176" -> "191 Reshape_177" [label="[2]", style=dashed]; -"191 Reshape_177" -> "194 Gemm_178" [label="[]", style=solid]; -"192 QuantizeLinear_h.0.attn.c_proj.weight_1" -> "193 DequantizeLinear_h.0.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; -"193 DequantizeLinear_h.0.attn.c_proj.weight_1" -> "194 Gemm_178" [label="[768, 768]", style=solid]; -"194 Gemm_178" -> "198 Reshape_182" [label="[]", style=solid]; -"195 Unsqueeze_179" -> "197 Concat_181" [label="[1]", style=dashed]; -"196 Unsqueeze_180" -> "197 Concat_181" [label="[1]", style=dashed]; -"197 Concat_181" -> "198 Reshape_182" [label="[3]", style=dashed]; -"198 Reshape_182" -> "199 Add_183" [label="[]", style=solid]; -"199 Add_183" -> "200 ReduceMean_184" [label="[]", style=solid]; -"199 Add_183" -> "201 Sub_185" [label="[]", style=solid]; -"199 Add_183" -> "266 Add_242" [label="[]", style=solid]; -"200 ReduceMean_184" -> "201 Sub_185" [label="[]", style=solid]; -"201 Sub_185" -> "203 Pow_187" [label="[]", style=solid]; -"201 Sub_185" -> "208 Div_192" [label="[]", style=solid]; -"202 Constant_186" -> "203 Pow_187" [label="[]", style=solid]; -"203 Pow_187" -> "204 ReduceMean_188" [label="[]", style=solid]; -"204 ReduceMean_188" -> "206 Add_190" [label="[]", style=solid]; -"205 Constant_189" -> "206 Add_190" [label="[]", style=solid]; -"206 Add_190" -> 
"207 Sqrt_191" [label="[]", style=solid]; -"207 Sqrt_191" -> "208 Div_192" [label="[]", style=solid]; -"208 Div_192" -> "209 Mul_193" [label="[]", style=solid]; -"209 Mul_193" -> "210 Add_194" [label="[]", style=solid]; -"210 Add_194" -> "211 QuantizeLinear_385_1" [label="[]", style=solid]; -"211 QuantizeLinear_385_1" -> "212 DequantizeLinear_385_1" [label="[]", style=dashed]; -"212 DequantizeLinear_385_1" -> "213 Shape_195" [label="[]", style=solid]; -"212 DequantizeLinear_385_1" -> "216 Shape_198" [label="[]", style=solid]; -"212 DequantizeLinear_385_1" -> "219 Shape_201" [label="[]", style=solid]; -"212 DequantizeLinear_385_1" -> "224 Reshape_206" [label="[]", style=solid]; -"213 Shape_195" -> "215 Gather_197" [label="[-1]", style=dashed]; -"214 Constant_196" -> "215 Gather_197" [label="[]", style=dashed]; -"215 Gather_197" -> "228 Unsqueeze_208" [label="[]", style=dashed]; -"216 Shape_198" -> "218 Gather_200" [label="[-1]", style=dashed]; -"217 Constant_199" -> "218 Gather_200" [label="[]", style=dashed]; -"218 Gather_200" -> "229 Unsqueeze_209" [label="[]", style=dashed]; -"219 Shape_201" -> "221 Gather_203" [label="[-1]", style=dashed]; -"220 Constant_202" -> "221 Gather_203" [label="[]", style=dashed]; -"221 Gather_203" -> "222 Unsqueeze_204" [label="[]", style=dashed]; -"222 Unsqueeze_204" -> "223 Concat_205" [label="[1]", style=dashed]; -"223 Concat_205" -> "224 Reshape_206" [label="[2]", style=dashed]; -"224 Reshape_206" -> "227 Gemm_207" [label="[]", style=solid]; -"225 QuantizeLinear_h.0.mlp.c_fc.weight_1" -> "226 DequantizeLinear_h.0.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; -"226 DequantizeLinear_h.0.mlp.c_fc.weight_1" -> "227 Gemm_207" [label="[768, 3072]", style=solid]; -"227 Gemm_207" -> "231 Reshape_211" [label="[]", style=solid]; -"228 Unsqueeze_208" -> "230 Concat_210" [label="[1]", style=dashed]; -"229 Unsqueeze_209" -> "230 Concat_210" [label="[1]", style=dashed]; -"230 Concat_210" -> "231 Reshape_211" [label="[3]", style=dashed]; -"231 Reshape_211" -> "233 Mul_213" [label="[]", style=solid]; -"231 Reshape_211" -> "235 Pow_215" [label="[]", style=solid]; -"231 Reshape_211" -> "238 Add_218" [label="[]", style=solid]; -"232 Constant_212" -> "233 Mul_213" [label="[]", style=solid]; -"233 Mul_213" -> "244 Mul_224" [label="[]", style=solid]; -"234 Constant_214" -> "235 Pow_215" [label="[]", style=solid]; -"235 Pow_215" -> "237 Mul_217" [label="[]", style=solid]; -"236 Constant_216" -> "237 Mul_217" [label="[]", style=solid]; -"237 Mul_217" -> "238 Add_218" [label="[]", style=solid]; -"238 Add_218" -> "240 Mul_220" [label="[]", style=solid]; -"239 Constant_219" -> "240 Mul_220" [label="[]", style=solid]; -"240 Mul_220" -> "241 Tanh_221" [label="[]", style=solid]; -"241 Tanh_221" -> "243 Add_223" [label="[]", style=solid]; -"242 Constant_222" -> "243 Add_223" [label="[]", style=solid]; -"243 Add_223" -> "244 Mul_224" [label="[]", style=solid]; -"244 Mul_224" -> "245 QuantizeLinear_419_1" [label="[]", style=solid]; -"245 QuantizeLinear_419_1" -> "246 DequantizeLinear_419_1" [label="[]", style=dashed]; -"246 DequantizeLinear_419_1" -> "247 Shape_225" [label="[]", style=solid]; -"246 DequantizeLinear_419_1" -> "250 Shape_228" [label="[]", style=solid]; -"246 DequantizeLinear_419_1" -> "253 Shape_231" [label="[]", style=solid]; -"246 DequantizeLinear_419_1" -> "258 Reshape_236" [label="[]", style=solid]; -"247 Shape_225" -> "249 Gather_227" [label="[-1]", style=dashed]; -"248 Constant_226" -> "249 Gather_227" [label="[]", style=dashed]; -"249 Gather_227" -> "262 
Unsqueeze_238" [label="[]", style=dashed]; -"250 Shape_228" -> "252 Gather_230" [label="[-1]", style=dashed]; -"251 Constant_229" -> "252 Gather_230" [label="[]", style=dashed]; -"252 Gather_230" -> "263 Unsqueeze_239" [label="[]", style=dashed]; -"253 Shape_231" -> "255 Gather_233" [label="[-1]", style=dashed]; -"254 Constant_232" -> "255 Gather_233" [label="[]", style=dashed]; -"255 Gather_233" -> "256 Unsqueeze_234" [label="[]", style=dashed]; -"256 Unsqueeze_234" -> "257 Concat_235" [label="[1]", style=dashed]; -"257 Concat_235" -> "258 Reshape_236" [label="[2]", style=dashed]; -"258 Reshape_236" -> "261 Gemm_237" [label="[]", style=solid]; -"259 QuantizeLinear_h.0.mlp.c_proj.weight_1" -> "260 DequantizeLinear_h.0.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; -"260 DequantizeLinear_h.0.mlp.c_proj.weight_1" -> "261 Gemm_237" [label="[3072, 768]", style=solid]; -"261 Gemm_237" -> "265 Reshape_241" [label="[]", style=solid]; -"262 Unsqueeze_238" -> "264 Concat_240" [label="[1]", style=dashed]; -"263 Unsqueeze_239" -> "264 Concat_240" [label="[1]", style=dashed]; -"264 Concat_240" -> "265 Reshape_241" [label="[3]", style=dashed]; -"265 Reshape_241" -> "266 Add_242" [label="[]", style=solid]; -"266 Add_242" -> "267 ReduceMean_243" [label="[]", style=solid]; -"266 Add_242" -> "268 Sub_244" [label="[]", style=solid]; -"266 Add_242" -> "430 Add_394" [label="[]", style=solid]; -"267 ReduceMean_243" -> "268 Sub_244" [label="[]", style=solid]; -"268 Sub_244" -> "270 Pow_246" [label="[]", style=solid]; -"268 Sub_244" -> "275 Div_251" [label="[]", style=solid]; -"269 Constant_245" -> "270 Pow_246" [label="[]", style=solid]; -"270 Pow_246" -> "271 ReduceMean_247" [label="[]", style=solid]; -"271 ReduceMean_247" -> "273 Add_249" [label="[]", style=solid]; -"272 Constant_248" -> "273 Add_249" [label="[]", style=solid]; -"273 Add_249" -> "274 Sqrt_250" [label="[]", style=solid]; -"274 Sqrt_250" -> "275 Div_251" [label="[]", style=solid]; -"275 Div_251" -> "276 Mul_252" [label="[]", style=solid]; -"276 Mul_252" -> "277 Add_253" [label="[]", style=solid]; -"277 Add_253" -> "278 QuantizeLinear_452_1" [label="[]", style=solid]; -"278 QuantizeLinear_452_1" -> "279 DequantizeLinear_452_1" [label="[]", style=dashed]; -"279 DequantizeLinear_452_1" -> "280 Shape_254" [label="[]", style=solid]; -"279 DequantizeLinear_452_1" -> "283 Shape_257" [label="[]", style=solid]; -"279 DequantizeLinear_452_1" -> "286 Shape_260" [label="[]", style=solid]; -"279 DequantizeLinear_452_1" -> "291 Reshape_265" [label="[]", style=solid]; -"280 Shape_254" -> "282 Gather_256" [label="[-1]", style=dashed]; -"281 Constant_255" -> "282 Gather_256" [label="[]", style=dashed]; -"282 Gather_256" -> "295 Unsqueeze_267" [label="[]", style=dashed]; -"283 Shape_257" -> "285 Gather_259" [label="[-1]", style=dashed]; -"284 Constant_258" -> "285 Gather_259" [label="[]", style=dashed]; -"285 Gather_259" -> "296 Unsqueeze_268" [label="[]", style=dashed]; -"286 Shape_260" -> "288 Gather_262" [label="[-1]", style=dashed]; -"287 Constant_261" -> "288 Gather_262" [label="[]", style=dashed]; -"288 Gather_262" -> "289 Unsqueeze_263" [label="[]", style=dashed]; -"289 Unsqueeze_263" -> "290 Concat_264" [label="[1]", style=dashed]; -"290 Concat_264" -> "291 Reshape_265" [label="[2]", style=dashed]; -"291 Reshape_265" -> "294 Gemm_266" [label="[]", style=solid]; -"292 QuantizeLinear_h.1.attn.c_attn.weight_1" -> "293 DequantizeLinear_h.1.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; -"293 DequantizeLinear_h.1.attn.c_attn.weight_1" 
-> "294 Gemm_266" [label="[768, 2304]", style=solid]; -"294 Gemm_266" -> "298 Reshape_270" [label="[]", style=solid]; -"295 Unsqueeze_267" -> "297 Concat_269" [label="[1]", style=dashed]; -"296 Unsqueeze_268" -> "297 Concat_269" [label="[1]", style=dashed]; -"297 Concat_269" -> "298 Reshape_270" [label="[3]", style=dashed]; -"298 Reshape_270" -> "299 Split_271" [label="[]", style=solid]; -"299 Split_271" -> "300 QuantizeLinear_query.3_1" [label="[]", style=solid]; -"299 Split_271" -> "302 Shape_272" [label="[]", style=solid]; -"299 Split_271" -> "305 Shape_275" [label="[]", style=solid]; -"299 Split_271" -> "308 Shape_278" [label="[]", style=solid]; -"299 Split_271" -> "321 Shape_291" [label="[]", style=solid]; -"299 Split_271" -> "324 Shape_294" [label="[]", style=solid]; -"299 Split_271" -> "327 Shape_297" [label="[]", style=solid]; -"299 Split_271" -> "338 Reshape_308" [label="[]", style=solid]; -"299 Split_271" -> "342 Shape_310" [label="[]", style=solid]; -"299 Split_271" -> "345 Shape_313" [label="[]", style=solid]; -"299 Split_271" -> "348 Shape_316" [label="[]", style=solid]; -"299 Split_271" -> "359 Reshape_327" [label="[]", style=solid]; -"300 QuantizeLinear_query.3_1" -> "301 DequantizeLinear_query.3_1" [label="[]", style=dashed]; -"301 DequantizeLinear_query.3_1" -> "319 Reshape_289" [label="[]", style=solid]; -"302 Shape_272" -> "304 Gather_274" [label="[-1]", style=dashed]; -"303 Constant_273" -> "304 Gather_274" [label="[]", style=dashed]; -"304 Gather_274" -> "315 Unsqueeze_285" [label="[]", style=dashed]; -"305 Shape_275" -> "307 Gather_277" [label="[-1]", style=dashed]; -"306 Constant_276" -> "307 Gather_277" [label="[]", style=dashed]; -"307 Gather_277" -> "316 Unsqueeze_286" [label="[]", style=dashed]; -"308 Shape_278" -> "310 Gather_280" [label="[-1]", style=dashed]; -"309 Constant_279" -> "310 Gather_280" [label="[]", style=dashed]; -"310 Gather_280" -> "312 Div_282" [label="[]", style=dashed]; -"311 Constant_281" -> "312 Div_282" [label="[]", style=dashed]; -"312 Div_282" -> "313 Cast_283" [label="[]", style=dashed]; -"313 Cast_283" -> "314 Cast_284" [label="[]", style=dashed]; -"314 Cast_284" -> "317 Unsqueeze_287" [label="[]", style=dashed]; -"315 Unsqueeze_285" -> "318 Concat_288" [label="[1]", style=dashed]; -"316 Unsqueeze_286" -> "318 Concat_288" [label="[1]", style=dashed]; -"317 Unsqueeze_287" -> "318 Concat_288" [label="[1]", style=dashed]; -"318 Concat_288" -> "319 Reshape_289" [label="[4]", style=dashed]; -"319 Reshape_289" -> "320 Transpose_290" [label="[]", style=solid]; -"320 Transpose_290" -> "365 MatMul_333" [label="[]", style=solid]; -"321 Shape_291" -> "323 Gather_293" [label="[-1]", style=dashed]; -"322 Constant_292" -> "323 Gather_293" [label="[]", style=dashed]; -"323 Gather_293" -> "334 Unsqueeze_304" [label="[]", style=dashed]; -"324 Shape_294" -> "326 Gather_296" [label="[-1]", style=dashed]; -"325 Constant_295" -> "326 Gather_296" [label="[]", style=dashed]; -"326 Gather_296" -> "335 Unsqueeze_305" [label="[]", style=dashed]; -"327 Shape_297" -> "329 Gather_299" [label="[-1]", style=dashed]; -"328 Constant_298" -> "329 Gather_299" [label="[]", style=dashed]; -"329 Gather_299" -> "331 Div_301" [label="[]", style=dashed]; -"330 Constant_300" -> "331 Div_301" [label="[]", style=dashed]; -"331 Div_301" -> "332 Cast_302" [label="[]", style=dashed]; -"332 Cast_302" -> "333 Cast_303" [label="[]", style=dashed]; -"333 Cast_303" -> "336 Unsqueeze_306" [label="[]", style=dashed]; -"334 Unsqueeze_304" -> "337 Concat_307" [label="[1]", style=dashed]; 
-"335 Unsqueeze_305" -> "337 Concat_307" [label="[1]", style=dashed]; -"336 Unsqueeze_306" -> "337 Concat_307" [label="[1]", style=dashed]; -"337 Concat_307" -> "338 Reshape_308" [label="[4]", style=dashed]; -"338 Reshape_308" -> "339 QuantizeLinear_517_1" [label="[]", style=solid]; -"338 Reshape_308" -> "361 Transpose_329" [label="[]", style=solid]; -"339 QuantizeLinear_517_1" -> "340 DequantizeLinear_517_1" [label="[]", style=dashed]; -"340 DequantizeLinear_517_1" -> "341 Transpose_309" [label="[]", style=solid]; -"341 Transpose_309" -> "365 MatMul_333" [label="[]", style=solid]; -"342 Shape_310" -> "344 Gather_312" [label="[-1]", style=dashed]; -"343 Constant_311" -> "344 Gather_312" [label="[]", style=dashed]; -"344 Gather_312" -> "355 Unsqueeze_323" [label="[]", style=dashed]; -"345 Shape_313" -> "347 Gather_315" [label="[-1]", style=dashed]; -"346 Constant_314" -> "347 Gather_315" [label="[]", style=dashed]; -"347 Gather_315" -> "356 Unsqueeze_324" [label="[]", style=dashed]; -"348 Shape_316" -> "350 Gather_318" [label="[-1]", style=dashed]; -"349 Constant_317" -> "350 Gather_318" [label="[]", style=dashed]; -"350 Gather_318" -> "352 Div_320" [label="[]", style=dashed]; -"351 Constant_319" -> "352 Div_320" [label="[]", style=dashed]; -"352 Div_320" -> "353 Cast_321" [label="[]", style=dashed]; -"353 Cast_321" -> "354 Cast_322" [label="[]", style=dashed]; -"354 Cast_322" -> "357 Unsqueeze_325" [label="[]", style=dashed]; -"355 Unsqueeze_323" -> "358 Concat_326" [label="[1]", style=dashed]; -"356 Unsqueeze_324" -> "358 Concat_326" [label="[1]", style=dashed]; -"357 Unsqueeze_325" -> "358 Concat_326" [label="[1]", style=dashed]; -"358 Concat_326" -> "359 Reshape_327" [label="[4]", style=dashed]; -"359 Reshape_327" -> "360 Transpose_328" [label="[]", style=solid]; -"360 Transpose_328" -> "363 Unsqueeze_331" [label="[]", style=solid]; -"360 Transpose_328" -> "389 MatMul_357" [label="[]", style=solid]; -"361 Transpose_329" -> "362 Unsqueeze_330" [label="[]", style=solid]; -"362 Unsqueeze_330" -> "364 Concat_332" [label="[]", style=solid]; -"363 Unsqueeze_331" -> "364 Concat_332" [label="[]", style=solid]; -"364 Concat_332" -> "2828 nncf_model_output_2" [label="[2, 1, 12, 8, 64]", style=solid]; -"365 MatMul_333" -> "367 Div_335" [label="[]", style=solid]; -"366 Constant_334" -> "367 Div_335" [label="[]", style=solid]; -"367 Div_335" -> "368 Shape_336" [label="[]", style=solid]; -"367 Div_335" -> "371 Shape_339" [label="[]", style=solid]; -"367 Div_335" -> "382 Mul_350" [label="[]", style=solid]; -"368 Shape_336" -> "370 Gather_338" [label="[-1]", style=dashed]; -"369 Constant_337" -> "370 Gather_338" [label="[]", style=dashed]; -"370 Gather_338" -> "374 Sub_342" [label="[]", style=dashed]; -"371 Shape_339" -> "373 Gather_341" [label="[-1]", style=dashed]; -"372 Constant_340" -> "373 Gather_341" [label="[]", style=dashed]; -"373 Gather_341" -> "374 Sub_342" [label="[]", style=dashed]; -"373 Gather_341" -> "376 Unsqueeze_344" [label="[]", style=dashed]; -"373 Gather_341" -> "379 Unsqueeze_347" [label="[]", style=dashed]; -"374 Sub_342" -> "375 Unsqueeze_343" [label="[]", style=dashed]; -"375 Unsqueeze_343" -> "378 Slice_346" [label="[1]", style=dashed]; -"376 Unsqueeze_344" -> "378 Slice_346" [label="[1]", style=dashed]; -"377 Constant_345" -> "378 Slice_346" [label="[1]", style=dashed]; -"378 Slice_346" -> "381 Slice_349" [label="[]", style=solid]; -"379 Unsqueeze_347" -> "381 Slice_349" [label="[1]", style=dashed]; -"380 Constant_348" -> "381 Slice_349" [label="[1]", style=dashed]; -"381 
Slice_349" -> "382 Mul_350" [label="[]", style=solid]; -"381 Slice_349" -> "384 Sub_352" [label="[]", style=solid]; -"382 Mul_350" -> "387 Sub_355" [label="[]", style=solid]; -"383 Constant_351" -> "384 Sub_352" [label="[]", style=solid]; -"384 Sub_352" -> "386 Mul_354" [label="[]", style=solid]; -"385 Constant_353" -> "386 Mul_354" [label="[]", style=solid]; -"386 Mul_354" -> "387 Sub_355" [label="[]", style=solid]; -"387 Sub_355" -> "388 Softmax_356" [label="[]", style=solid]; -"388 Softmax_356" -> "389 MatMul_357" [label="[]", style=solid]; -"389 MatMul_357" -> "390 QuantizeLinear_574_1" [label="[]", style=solid]; -"390 QuantizeLinear_574_1" -> "391 DequantizeLinear_574_1" [label="[]", style=dashed]; -"391 DequantizeLinear_574_1" -> "392 Transpose_358" [label="[]", style=solid]; -"392 Transpose_358" -> "393 Shape_359" [label="[]", style=solid]; -"392 Transpose_358" -> "396 Shape_362" [label="[]", style=solid]; -"392 Transpose_358" -> "399 Shape_365" [label="[]", style=solid]; -"392 Transpose_358" -> "402 Shape_368" [label="[]", style=solid]; -"392 Transpose_358" -> "410 Reshape_376" [label="[]", style=solid]; -"393 Shape_359" -> "395 Gather_361" [label="[-1]", style=dashed]; -"394 Constant_360" -> "395 Gather_361" [label="[]", style=dashed]; -"395 Gather_361" -> "406 Unsqueeze_372" [label="[]", style=dashed]; -"396 Shape_362" -> "398 Gather_364" [label="[-1]", style=dashed]; -"397 Constant_363" -> "398 Gather_364" [label="[]", style=dashed]; -"398 Gather_364" -> "407 Unsqueeze_373" [label="[]", style=dashed]; -"399 Shape_365" -> "401 Gather_367" [label="[-1]", style=dashed]; -"400 Constant_366" -> "401 Gather_367" [label="[]", style=dashed]; -"401 Gather_367" -> "405 Mul_371" [label="[]", style=dashed]; -"402 Shape_368" -> "404 Gather_370" [label="[-1]", style=dashed]; -"403 Constant_369" -> "404 Gather_370" [label="[]", style=dashed]; -"404 Gather_370" -> "405 Mul_371" [label="[]", style=dashed]; -"405 Mul_371" -> "408 Unsqueeze_374" [label="[]", style=dashed]; -"406 Unsqueeze_372" -> "409 Concat_375" [label="[1]", style=dashed]; -"407 Unsqueeze_373" -> "409 Concat_375" [label="[1]", style=dashed]; -"408 Unsqueeze_374" -> "409 Concat_375" [label="[1]", style=dashed]; -"409 Concat_375" -> "410 Reshape_376" [label="[3]", style=dashed]; -"410 Reshape_376" -> "411 Shape_377" [label="[]", style=solid]; -"410 Reshape_376" -> "414 Shape_380" [label="[]", style=solid]; -"410 Reshape_376" -> "417 Shape_383" [label="[]", style=solid]; -"410 Reshape_376" -> "422 Reshape_388" [label="[]", style=solid]; -"411 Shape_377" -> "413 Gather_379" [label="[-1]", style=dashed]; -"412 Constant_378" -> "413 Gather_379" [label="[]", style=dashed]; -"413 Gather_379" -> "426 Unsqueeze_390" [label="[]", style=dashed]; -"414 Shape_380" -> "416 Gather_382" [label="[-1]", style=dashed]; -"415 Constant_381" -> "416 Gather_382" [label="[]", style=dashed]; -"416 Gather_382" -> "427 Unsqueeze_391" [label="[]", style=dashed]; -"417 Shape_383" -> "419 Gather_385" [label="[-1]", style=dashed]; -"418 Constant_384" -> "419 Gather_385" [label="[]", style=dashed]; -"419 Gather_385" -> "420 Unsqueeze_386" [label="[]", style=dashed]; -"420 Unsqueeze_386" -> "421 Concat_387" [label="[1]", style=dashed]; -"421 Concat_387" -> "422 Reshape_388" [label="[2]", style=dashed]; -"422 Reshape_388" -> "425 Gemm_389" [label="[]", style=solid]; -"423 QuantizeLinear_h.1.attn.c_proj.weight_1" -> "424 DequantizeLinear_h.1.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; -"424 DequantizeLinear_h.1.attn.c_proj.weight_1" -> "425 Gemm_389" 
[label="[768, 768]", style=solid]; -"425 Gemm_389" -> "429 Reshape_393" [label="[]", style=solid]; -"426 Unsqueeze_390" -> "428 Concat_392" [label="[1]", style=dashed]; -"427 Unsqueeze_391" -> "428 Concat_392" [label="[1]", style=dashed]; -"428 Concat_392" -> "429 Reshape_393" [label="[3]", style=dashed]; -"429 Reshape_393" -> "430 Add_394" [label="[]", style=solid]; -"430 Add_394" -> "431 ReduceMean_395" [label="[]", style=solid]; -"430 Add_394" -> "432 Sub_396" [label="[]", style=solid]; -"430 Add_394" -> "497 Add_453" [label="[]", style=solid]; -"431 ReduceMean_395" -> "432 Sub_396" [label="[]", style=solid]; -"432 Sub_396" -> "434 Pow_398" [label="[]", style=solid]; -"432 Sub_396" -> "439 Div_403" [label="[]", style=solid]; -"433 Constant_397" -> "434 Pow_398" [label="[]", style=solid]; -"434 Pow_398" -> "435 ReduceMean_399" [label="[]", style=solid]; -"435 ReduceMean_399" -> "437 Add_401" [label="[]", style=solid]; -"436 Constant_400" -> "437 Add_401" [label="[]", style=solid]; -"437 Add_401" -> "438 Sqrt_402" [label="[]", style=solid]; -"438 Sqrt_402" -> "439 Div_403" [label="[]", style=solid]; -"439 Div_403" -> "440 Mul_404" [label="[]", style=solid]; -"440 Mul_404" -> "441 Add_405" [label="[]", style=solid]; -"441 Add_405" -> "442 QuantizeLinear_626_1" [label="[]", style=solid]; -"442 QuantizeLinear_626_1" -> "443 DequantizeLinear_626_1" [label="[]", style=dashed]; -"443 DequantizeLinear_626_1" -> "444 Shape_406" [label="[]", style=solid]; -"443 DequantizeLinear_626_1" -> "447 Shape_409" [label="[]", style=solid]; -"443 DequantizeLinear_626_1" -> "450 Shape_412" [label="[]", style=solid]; -"443 DequantizeLinear_626_1" -> "455 Reshape_417" [label="[]", style=solid]; -"444 Shape_406" -> "446 Gather_408" [label="[-1]", style=dashed]; -"445 Constant_407" -> "446 Gather_408" [label="[]", style=dashed]; -"446 Gather_408" -> "459 Unsqueeze_419" [label="[]", style=dashed]; -"447 Shape_409" -> "449 Gather_411" [label="[-1]", style=dashed]; -"448 Constant_410" -> "449 Gather_411" [label="[]", style=dashed]; -"449 Gather_411" -> "460 Unsqueeze_420" [label="[]", style=dashed]; -"450 Shape_412" -> "452 Gather_414" [label="[-1]", style=dashed]; -"451 Constant_413" -> "452 Gather_414" [label="[]", style=dashed]; -"452 Gather_414" -> "453 Unsqueeze_415" [label="[]", style=dashed]; -"453 Unsqueeze_415" -> "454 Concat_416" [label="[1]", style=dashed]; -"454 Concat_416" -> "455 Reshape_417" [label="[2]", style=dashed]; -"455 Reshape_417" -> "458 Gemm_418" [label="[]", style=solid]; -"456 QuantizeLinear_h.1.mlp.c_fc.weight_1" -> "457 DequantizeLinear_h.1.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; -"457 DequantizeLinear_h.1.mlp.c_fc.weight_1" -> "458 Gemm_418" [label="[768, 3072]", style=solid]; -"458 Gemm_418" -> "462 Reshape_422" [label="[]", style=solid]; -"459 Unsqueeze_419" -> "461 Concat_421" [label="[1]", style=dashed]; -"460 Unsqueeze_420" -> "461 Concat_421" [label="[1]", style=dashed]; -"461 Concat_421" -> "462 Reshape_422" [label="[3]", style=dashed]; -"462 Reshape_422" -> "464 Mul_424" [label="[]", style=solid]; -"462 Reshape_422" -> "466 Pow_426" [label="[]", style=solid]; -"462 Reshape_422" -> "469 Add_429" [label="[]", style=solid]; -"463 Constant_423" -> "464 Mul_424" [label="[]", style=solid]; -"464 Mul_424" -> "475 Mul_435" [label="[]", style=solid]; -"465 Constant_425" -> "466 Pow_426" [label="[]", style=solid]; -"466 Pow_426" -> "468 Mul_428" [label="[]", style=solid]; -"467 Constant_427" -> "468 Mul_428" [label="[]", style=solid]; -"468 Mul_428" -> "469 Add_429" 
[label="[]", style=solid]; -"469 Add_429" -> "471 Mul_431" [label="[]", style=solid]; -"470 Constant_430" -> "471 Mul_431" [label="[]", style=solid]; -"471 Mul_431" -> "472 Tanh_432" [label="[]", style=solid]; -"472 Tanh_432" -> "474 Add_434" [label="[]", style=solid]; -"473 Constant_433" -> "474 Add_434" [label="[]", style=solid]; -"474 Add_434" -> "475 Mul_435" [label="[]", style=solid]; -"475 Mul_435" -> "476 QuantizeLinear_660_1" [label="[]", style=solid]; -"476 QuantizeLinear_660_1" -> "477 DequantizeLinear_660_1" [label="[]", style=dashed]; -"477 DequantizeLinear_660_1" -> "478 Shape_436" [label="[]", style=solid]; -"477 DequantizeLinear_660_1" -> "481 Shape_439" [label="[]", style=solid]; -"477 DequantizeLinear_660_1" -> "484 Shape_442" [label="[]", style=solid]; -"477 DequantizeLinear_660_1" -> "489 Reshape_447" [label="[]", style=solid]; -"478 Shape_436" -> "480 Gather_438" [label="[-1]", style=dashed]; -"479 Constant_437" -> "480 Gather_438" [label="[]", style=dashed]; -"480 Gather_438" -> "493 Unsqueeze_449" [label="[]", style=dashed]; -"481 Shape_439" -> "483 Gather_441" [label="[-1]", style=dashed]; -"482 Constant_440" -> "483 Gather_441" [label="[]", style=dashed]; -"483 Gather_441" -> "494 Unsqueeze_450" [label="[]", style=dashed]; -"484 Shape_442" -> "486 Gather_444" [label="[-1]", style=dashed]; -"485 Constant_443" -> "486 Gather_444" [label="[]", style=dashed]; -"486 Gather_444" -> "487 Unsqueeze_445" [label="[]", style=dashed]; -"487 Unsqueeze_445" -> "488 Concat_446" [label="[1]", style=dashed]; -"488 Concat_446" -> "489 Reshape_447" [label="[2]", style=dashed]; -"489 Reshape_447" -> "492 Gemm_448" [label="[]", style=solid]; -"490 QuantizeLinear_h.1.mlp.c_proj.weight_1" -> "491 DequantizeLinear_h.1.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; -"491 DequantizeLinear_h.1.mlp.c_proj.weight_1" -> "492 Gemm_448" [label="[3072, 768]", style=solid]; -"492 Gemm_448" -> "496 Reshape_452" [label="[]", style=solid]; -"493 Unsqueeze_449" -> "495 Concat_451" [label="[1]", style=dashed]; -"494 Unsqueeze_450" -> "495 Concat_451" [label="[1]", style=dashed]; -"495 Concat_451" -> "496 Reshape_452" [label="[3]", style=dashed]; -"496 Reshape_452" -> "497 Add_453" [label="[]", style=solid]; -"497 Add_453" -> "498 ReduceMean_454" [label="[]", style=solid]; -"497 Add_453" -> "499 Sub_455" [label="[]", style=solid]; -"497 Add_453" -> "661 Add_605" [label="[]", style=solid]; -"498 ReduceMean_454" -> "499 Sub_455" [label="[]", style=solid]; -"499 Sub_455" -> "501 Pow_457" [label="[]", style=solid]; -"499 Sub_455" -> "506 Div_462" [label="[]", style=solid]; -"500 Constant_456" -> "501 Pow_457" [label="[]", style=solid]; -"501 Pow_457" -> "502 ReduceMean_458" [label="[]", style=solid]; -"502 ReduceMean_458" -> "504 Add_460" [label="[]", style=solid]; -"503 Constant_459" -> "504 Add_460" [label="[]", style=solid]; -"504 Add_460" -> "505 Sqrt_461" [label="[]", style=solid]; -"505 Sqrt_461" -> "506 Div_462" [label="[]", style=solid]; -"506 Div_462" -> "507 Mul_463" [label="[]", style=solid]; -"507 Mul_463" -> "508 Add_464" [label="[]", style=solid]; -"508 Add_464" -> "509 QuantizeLinear_693_1" [label="[]", style=solid]; -"509 QuantizeLinear_693_1" -> "510 DequantizeLinear_693_1" [label="[]", style=dashed]; -"510 DequantizeLinear_693_1" -> "511 Shape_465" [label="[]", style=solid]; -"510 DequantizeLinear_693_1" -> "514 Shape_468" [label="[]", style=solid]; -"510 DequantizeLinear_693_1" -> "517 Shape_471" [label="[]", style=solid]; -"510 DequantizeLinear_693_1" -> "522 Reshape_476" 
[label="[]", style=solid]; -"511 Shape_465" -> "513 Gather_467" [label="[-1]", style=dashed]; -"512 Constant_466" -> "513 Gather_467" [label="[]", style=dashed]; -"513 Gather_467" -> "526 Unsqueeze_478" [label="[]", style=dashed]; -"514 Shape_468" -> "516 Gather_470" [label="[-1]", style=dashed]; -"515 Constant_469" -> "516 Gather_470" [label="[]", style=dashed]; -"516 Gather_470" -> "527 Unsqueeze_479" [label="[]", style=dashed]; -"517 Shape_471" -> "519 Gather_473" [label="[-1]", style=dashed]; -"518 Constant_472" -> "519 Gather_473" [label="[]", style=dashed]; -"519 Gather_473" -> "520 Unsqueeze_474" [label="[]", style=dashed]; -"520 Unsqueeze_474" -> "521 Concat_475" [label="[1]", style=dashed]; -"521 Concat_475" -> "522 Reshape_476" [label="[2]", style=dashed]; -"522 Reshape_476" -> "525 Gemm_477" [label="[]", style=solid]; -"523 QuantizeLinear_h.2.attn.c_attn.weight_1" -> "524 DequantizeLinear_h.2.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; -"524 DequantizeLinear_h.2.attn.c_attn.weight_1" -> "525 Gemm_477" [label="[768, 2304]", style=solid]; -"525 Gemm_477" -> "529 Reshape_481" [label="[]", style=solid]; -"526 Unsqueeze_478" -> "528 Concat_480" [label="[1]", style=dashed]; -"527 Unsqueeze_479" -> "528 Concat_480" [label="[1]", style=dashed]; -"528 Concat_480" -> "529 Reshape_481" [label="[3]", style=dashed]; -"529 Reshape_481" -> "530 Split_482" [label="[]", style=solid]; -"530 Split_482" -> "531 QuantizeLinear_query.5_1" [label="[]", style=solid]; -"530 Split_482" -> "533 Shape_483" [label="[]", style=solid]; -"530 Split_482" -> "536 Shape_486" [label="[]", style=solid]; -"530 Split_482" -> "539 Shape_489" [label="[]", style=solid]; -"530 Split_482" -> "552 Shape_502" [label="[]", style=solid]; -"530 Split_482" -> "555 Shape_505" [label="[]", style=solid]; -"530 Split_482" -> "558 Shape_508" [label="[]", style=solid]; -"530 Split_482" -> "569 Reshape_519" [label="[]", style=solid]; -"530 Split_482" -> "573 Shape_521" [label="[]", style=solid]; -"530 Split_482" -> "576 Shape_524" [label="[]", style=solid]; -"530 Split_482" -> "579 Shape_527" [label="[]", style=solid]; -"530 Split_482" -> "590 Reshape_538" [label="[]", style=solid]; -"531 QuantizeLinear_query.5_1" -> "532 DequantizeLinear_query.5_1" [label="[]", style=dashed]; -"532 DequantizeLinear_query.5_1" -> "550 Reshape_500" [label="[]", style=solid]; -"533 Shape_483" -> "535 Gather_485" [label="[-1]", style=dashed]; -"534 Constant_484" -> "535 Gather_485" [label="[]", style=dashed]; -"535 Gather_485" -> "546 Unsqueeze_496" [label="[]", style=dashed]; -"536 Shape_486" -> "538 Gather_488" [label="[-1]", style=dashed]; -"537 Constant_487" -> "538 Gather_488" [label="[]", style=dashed]; -"538 Gather_488" -> "547 Unsqueeze_497" [label="[]", style=dashed]; -"539 Shape_489" -> "541 Gather_491" [label="[-1]", style=dashed]; -"540 Constant_490" -> "541 Gather_491" [label="[]", style=dashed]; -"541 Gather_491" -> "543 Div_493" [label="[]", style=dashed]; -"542 Constant_492" -> "543 Div_493" [label="[]", style=dashed]; -"543 Div_493" -> "544 Cast_494" [label="[]", style=dashed]; -"544 Cast_494" -> "545 Cast_495" [label="[]", style=dashed]; -"545 Cast_495" -> "548 Unsqueeze_498" [label="[]", style=dashed]; -"546 Unsqueeze_496" -> "549 Concat_499" [label="[1]", style=dashed]; -"547 Unsqueeze_497" -> "549 Concat_499" [label="[1]", style=dashed]; -"548 Unsqueeze_498" -> "549 Concat_499" [label="[1]", style=dashed]; -"549 Concat_499" -> "550 Reshape_500" [label="[4]", style=dashed]; -"550 Reshape_500" -> "551 Transpose_501" 
[label="[]", style=solid]; -"551 Transpose_501" -> "596 MatMul_544" [label="[]", style=solid]; -"552 Shape_502" -> "554 Gather_504" [label="[-1]", style=dashed]; -"553 Constant_503" -> "554 Gather_504" [label="[]", style=dashed]; -"554 Gather_504" -> "565 Unsqueeze_515" [label="[]", style=dashed]; -"555 Shape_505" -> "557 Gather_507" [label="[-1]", style=dashed]; -"556 Constant_506" -> "557 Gather_507" [label="[]", style=dashed]; -"557 Gather_507" -> "566 Unsqueeze_516" [label="[]", style=dashed]; -"558 Shape_508" -> "560 Gather_510" [label="[-1]", style=dashed]; -"559 Constant_509" -> "560 Gather_510" [label="[]", style=dashed]; -"560 Gather_510" -> "562 Div_512" [label="[]", style=dashed]; -"561 Constant_511" -> "562 Div_512" [label="[]", style=dashed]; -"562 Div_512" -> "563 Cast_513" [label="[]", style=dashed]; -"563 Cast_513" -> "564 Cast_514" [label="[]", style=dashed]; -"564 Cast_514" -> "567 Unsqueeze_517" [label="[]", style=dashed]; -"565 Unsqueeze_515" -> "568 Concat_518" [label="[1]", style=dashed]; -"566 Unsqueeze_516" -> "568 Concat_518" [label="[1]", style=dashed]; -"567 Unsqueeze_517" -> "568 Concat_518" [label="[1]", style=dashed]; -"568 Concat_518" -> "569 Reshape_519" [label="[4]", style=dashed]; -"569 Reshape_519" -> "570 QuantizeLinear_758_1" [label="[]", style=solid]; -"569 Reshape_519" -> "592 Transpose_540" [label="[]", style=solid]; -"570 QuantizeLinear_758_1" -> "571 DequantizeLinear_758_1" [label="[]", style=dashed]; -"571 DequantizeLinear_758_1" -> "572 Transpose_520" [label="[]", style=solid]; -"572 Transpose_520" -> "596 MatMul_544" [label="[]", style=solid]; -"573 Shape_521" -> "575 Gather_523" [label="[-1]", style=dashed]; -"574 Constant_522" -> "575 Gather_523" [label="[]", style=dashed]; -"575 Gather_523" -> "586 Unsqueeze_534" [label="[]", style=dashed]; -"576 Shape_524" -> "578 Gather_526" [label="[-1]", style=dashed]; -"577 Constant_525" -> "578 Gather_526" [label="[]", style=dashed]; -"578 Gather_526" -> "587 Unsqueeze_535" [label="[]", style=dashed]; -"579 Shape_527" -> "581 Gather_529" [label="[-1]", style=dashed]; -"580 Constant_528" -> "581 Gather_529" [label="[]", style=dashed]; -"581 Gather_529" -> "583 Div_531" [label="[]", style=dashed]; -"582 Constant_530" -> "583 Div_531" [label="[]", style=dashed]; -"583 Div_531" -> "584 Cast_532" [label="[]", style=dashed]; -"584 Cast_532" -> "585 Cast_533" [label="[]", style=dashed]; -"585 Cast_533" -> "588 Unsqueeze_536" [label="[]", style=dashed]; -"586 Unsqueeze_534" -> "589 Concat_537" [label="[1]", style=dashed]; -"587 Unsqueeze_535" -> "589 Concat_537" [label="[1]", style=dashed]; -"588 Unsqueeze_536" -> "589 Concat_537" [label="[1]", style=dashed]; -"589 Concat_537" -> "590 Reshape_538" [label="[4]", style=dashed]; -"590 Reshape_538" -> "591 Transpose_539" [label="[]", style=solid]; -"591 Transpose_539" -> "594 Unsqueeze_542" [label="[]", style=solid]; -"591 Transpose_539" -> "620 MatMul_568" [label="[]", style=solid]; -"592 Transpose_540" -> "593 Unsqueeze_541" [label="[]", style=solid]; -"593 Unsqueeze_541" -> "595 Concat_543" [label="[]", style=solid]; -"594 Unsqueeze_542" -> "595 Concat_543" [label="[]", style=solid]; -"595 Concat_543" -> "2829 nncf_model_output_3" [label="[2, 1, 12, 8, 64]", style=solid]; -"596 MatMul_544" -> "598 Div_546" [label="[]", style=solid]; -"597 Constant_545" -> "598 Div_546" [label="[]", style=solid]; -"598 Div_546" -> "599 Shape_547" [label="[]", style=solid]; -"598 Div_546" -> "602 Shape_550" [label="[]", style=solid]; -"598 Div_546" -> "613 Mul_561" [label="[]", 
style=solid]; -"599 Shape_547" -> "601 Gather_549" [label="[-1]", style=dashed]; -"600 Constant_548" -> "601 Gather_549" [label="[]", style=dashed]; -"601 Gather_549" -> "605 Sub_553" [label="[]", style=dashed]; -"602 Shape_550" -> "604 Gather_552" [label="[-1]", style=dashed]; -"603 Constant_551" -> "604 Gather_552" [label="[]", style=dashed]; -"604 Gather_552" -> "605 Sub_553" [label="[]", style=dashed]; -"604 Gather_552" -> "607 Unsqueeze_555" [label="[]", style=dashed]; -"604 Gather_552" -> "610 Unsqueeze_558" [label="[]", style=dashed]; -"605 Sub_553" -> "606 Unsqueeze_554" [label="[]", style=dashed]; -"606 Unsqueeze_554" -> "609 Slice_557" [label="[1]", style=dashed]; -"607 Unsqueeze_555" -> "609 Slice_557" [label="[1]", style=dashed]; -"608 Constant_556" -> "609 Slice_557" [label="[1]", style=dashed]; -"609 Slice_557" -> "612 Slice_560" [label="[]", style=solid]; -"610 Unsqueeze_558" -> "612 Slice_560" [label="[1]", style=dashed]; -"611 Constant_559" -> "612 Slice_560" [label="[1]", style=dashed]; -"612 Slice_560" -> "613 Mul_561" [label="[]", style=solid]; -"612 Slice_560" -> "615 Sub_563" [label="[]", style=solid]; -"613 Mul_561" -> "618 Sub_566" [label="[]", style=solid]; -"614 Constant_562" -> "615 Sub_563" [label="[]", style=solid]; -"615 Sub_563" -> "617 Mul_565" [label="[]", style=solid]; -"616 Constant_564" -> "617 Mul_565" [label="[]", style=solid]; -"617 Mul_565" -> "618 Sub_566" [label="[]", style=solid]; -"618 Sub_566" -> "619 Softmax_567" [label="[]", style=solid]; -"619 Softmax_567" -> "620 MatMul_568" [label="[]", style=solid]; -"620 MatMul_568" -> "621 QuantizeLinear_815_1" [label="[]", style=solid]; -"621 QuantizeLinear_815_1" -> "622 DequantizeLinear_815_1" [label="[]", style=dashed]; -"622 DequantizeLinear_815_1" -> "623 Transpose_569" [label="[]", style=solid]; -"623 Transpose_569" -> "624 Shape_570" [label="[]", style=solid]; -"623 Transpose_569" -> "627 Shape_573" [label="[]", style=solid]; -"623 Transpose_569" -> "630 Shape_576" [label="[]", style=solid]; -"623 Transpose_569" -> "633 Shape_579" [label="[]", style=solid]; -"623 Transpose_569" -> "641 Reshape_587" [label="[]", style=solid]; -"624 Shape_570" -> "626 Gather_572" [label="[-1]", style=dashed]; -"625 Constant_571" -> "626 Gather_572" [label="[]", style=dashed]; -"626 Gather_572" -> "637 Unsqueeze_583" [label="[]", style=dashed]; -"627 Shape_573" -> "629 Gather_575" [label="[-1]", style=dashed]; -"628 Constant_574" -> "629 Gather_575" [label="[]", style=dashed]; -"629 Gather_575" -> "638 Unsqueeze_584" [label="[]", style=dashed]; -"630 Shape_576" -> "632 Gather_578" [label="[-1]", style=dashed]; -"631 Constant_577" -> "632 Gather_578" [label="[]", style=dashed]; -"632 Gather_578" -> "636 Mul_582" [label="[]", style=dashed]; -"633 Shape_579" -> "635 Gather_581" [label="[-1]", style=dashed]; -"634 Constant_580" -> "635 Gather_581" [label="[]", style=dashed]; -"635 Gather_581" -> "636 Mul_582" [label="[]", style=dashed]; -"636 Mul_582" -> "639 Unsqueeze_585" [label="[]", style=dashed]; -"637 Unsqueeze_583" -> "640 Concat_586" [label="[1]", style=dashed]; -"638 Unsqueeze_584" -> "640 Concat_586" [label="[1]", style=dashed]; -"639 Unsqueeze_585" -> "640 Concat_586" [label="[1]", style=dashed]; -"640 Concat_586" -> "641 Reshape_587" [label="[3]", style=dashed]; -"641 Reshape_587" -> "642 Shape_588" [label="[]", style=solid]; -"641 Reshape_587" -> "645 Shape_591" [label="[]", style=solid]; -"641 Reshape_587" -> "648 Shape_594" [label="[]", style=solid]; -"641 Reshape_587" -> "653 Reshape_599" [label="[]", 
style=solid]; -"642 Shape_588" -> "644 Gather_590" [label="[-1]", style=dashed]; -"643 Constant_589" -> "644 Gather_590" [label="[]", style=dashed]; -"644 Gather_590" -> "657 Unsqueeze_601" [label="[]", style=dashed]; -"645 Shape_591" -> "647 Gather_593" [label="[-1]", style=dashed]; -"646 Constant_592" -> "647 Gather_593" [label="[]", style=dashed]; -"647 Gather_593" -> "658 Unsqueeze_602" [label="[]", style=dashed]; -"648 Shape_594" -> "650 Gather_596" [label="[-1]", style=dashed]; -"649 Constant_595" -> "650 Gather_596" [label="[]", style=dashed]; -"650 Gather_596" -> "651 Unsqueeze_597" [label="[]", style=dashed]; -"651 Unsqueeze_597" -> "652 Concat_598" [label="[1]", style=dashed]; -"652 Concat_598" -> "653 Reshape_599" [label="[2]", style=dashed]; -"653 Reshape_599" -> "656 Gemm_600" [label="[]", style=solid]; -"654 QuantizeLinear_h.2.attn.c_proj.weight_1" -> "655 DequantizeLinear_h.2.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; -"655 DequantizeLinear_h.2.attn.c_proj.weight_1" -> "656 Gemm_600" [label="[768, 768]", style=solid]; -"656 Gemm_600" -> "660 Reshape_604" [label="[]", style=solid]; -"657 Unsqueeze_601" -> "659 Concat_603" [label="[1]", style=dashed]; -"658 Unsqueeze_602" -> "659 Concat_603" [label="[1]", style=dashed]; -"659 Concat_603" -> "660 Reshape_604" [label="[3]", style=dashed]; -"660 Reshape_604" -> "661 Add_605" [label="[]", style=solid]; -"661 Add_605" -> "662 ReduceMean_606" [label="[]", style=solid]; -"661 Add_605" -> "663 Sub_607" [label="[]", style=solid]; -"661 Add_605" -> "728 Add_664" [label="[]", style=solid]; -"662 ReduceMean_606" -> "663 Sub_607" [label="[]", style=solid]; -"663 Sub_607" -> "665 Pow_609" [label="[]", style=solid]; -"663 Sub_607" -> "670 Div_614" [label="[]", style=solid]; -"664 Constant_608" -> "665 Pow_609" [label="[]", style=solid]; -"665 Pow_609" -> "666 ReduceMean_610" [label="[]", style=solid]; -"666 ReduceMean_610" -> "668 Add_612" [label="[]", style=solid]; -"667 Constant_611" -> "668 Add_612" [label="[]", style=solid]; -"668 Add_612" -> "669 Sqrt_613" [label="[]", style=solid]; -"669 Sqrt_613" -> "670 Div_614" [label="[]", style=solid]; -"670 Div_614" -> "671 Mul_615" [label="[]", style=solid]; -"671 Mul_615" -> "672 Add_616" [label="[]", style=solid]; -"672 Add_616" -> "673 QuantizeLinear_867_1" [label="[]", style=solid]; -"673 QuantizeLinear_867_1" -> "674 DequantizeLinear_867_1" [label="[]", style=dashed]; -"674 DequantizeLinear_867_1" -> "675 Shape_617" [label="[]", style=solid]; -"674 DequantizeLinear_867_1" -> "678 Shape_620" [label="[]", style=solid]; -"674 DequantizeLinear_867_1" -> "681 Shape_623" [label="[]", style=solid]; -"674 DequantizeLinear_867_1" -> "686 Reshape_628" [label="[]", style=solid]; -"675 Shape_617" -> "677 Gather_619" [label="[-1]", style=dashed]; -"676 Constant_618" -> "677 Gather_619" [label="[]", style=dashed]; -"677 Gather_619" -> "690 Unsqueeze_630" [label="[]", style=dashed]; -"678 Shape_620" -> "680 Gather_622" [label="[-1]", style=dashed]; -"679 Constant_621" -> "680 Gather_622" [label="[]", style=dashed]; -"680 Gather_622" -> "691 Unsqueeze_631" [label="[]", style=dashed]; -"681 Shape_623" -> "683 Gather_625" [label="[-1]", style=dashed]; -"682 Constant_624" -> "683 Gather_625" [label="[]", style=dashed]; -"683 Gather_625" -> "684 Unsqueeze_626" [label="[]", style=dashed]; -"684 Unsqueeze_626" -> "685 Concat_627" [label="[1]", style=dashed]; -"685 Concat_627" -> "686 Reshape_628" [label="[2]", style=dashed]; -"686 Reshape_628" -> "689 Gemm_629" [label="[]", style=solid]; -"687 
QuantizeLinear_h.2.mlp.c_fc.weight_1" -> "688 DequantizeLinear_h.2.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; -"688 DequantizeLinear_h.2.mlp.c_fc.weight_1" -> "689 Gemm_629" [label="[768, 3072]", style=solid]; -"689 Gemm_629" -> "693 Reshape_633" [label="[]", style=solid]; -"690 Unsqueeze_630" -> "692 Concat_632" [label="[1]", style=dashed]; -"691 Unsqueeze_631" -> "692 Concat_632" [label="[1]", style=dashed]; -"692 Concat_632" -> "693 Reshape_633" [label="[3]", style=dashed]; -"693 Reshape_633" -> "695 Mul_635" [label="[]", style=solid]; -"693 Reshape_633" -> "697 Pow_637" [label="[]", style=solid]; -"693 Reshape_633" -> "700 Add_640" [label="[]", style=solid]; -"694 Constant_634" -> "695 Mul_635" [label="[]", style=solid]; -"695 Mul_635" -> "706 Mul_646" [label="[]", style=solid]; -"696 Constant_636" -> "697 Pow_637" [label="[]", style=solid]; -"697 Pow_637" -> "699 Mul_639" [label="[]", style=solid]; -"698 Constant_638" -> "699 Mul_639" [label="[]", style=solid]; -"699 Mul_639" -> "700 Add_640" [label="[]", style=solid]; -"700 Add_640" -> "702 Mul_642" [label="[]", style=solid]; -"701 Constant_641" -> "702 Mul_642" [label="[]", style=solid]; -"702 Mul_642" -> "703 Tanh_643" [label="[]", style=solid]; -"703 Tanh_643" -> "705 Add_645" [label="[]", style=solid]; -"704 Constant_644" -> "705 Add_645" [label="[]", style=solid]; -"705 Add_645" -> "706 Mul_646" [label="[]", style=solid]; -"706 Mul_646" -> "707 QuantizeLinear_901_1" [label="[]", style=solid]; -"707 QuantizeLinear_901_1" -> "708 DequantizeLinear_901_1" [label="[]", style=dashed]; -"708 DequantizeLinear_901_1" -> "709 Shape_647" [label="[]", style=solid]; -"708 DequantizeLinear_901_1" -> "712 Shape_650" [label="[]", style=solid]; -"708 DequantizeLinear_901_1" -> "715 Shape_653" [label="[]", style=solid]; -"708 DequantizeLinear_901_1" -> "720 Reshape_658" [label="[]", style=solid]; -"709 Shape_647" -> "711 Gather_649" [label="[-1]", style=dashed]; -"710 Constant_648" -> "711 Gather_649" [label="[]", style=dashed]; -"711 Gather_649" -> "724 Unsqueeze_660" [label="[]", style=dashed]; -"712 Shape_650" -> "714 Gather_652" [label="[-1]", style=dashed]; -"713 Constant_651" -> "714 Gather_652" [label="[]", style=dashed]; -"714 Gather_652" -> "725 Unsqueeze_661" [label="[]", style=dashed]; -"715 Shape_653" -> "717 Gather_655" [label="[-1]", style=dashed]; -"716 Constant_654" -> "717 Gather_655" [label="[]", style=dashed]; -"717 Gather_655" -> "718 Unsqueeze_656" [label="[]", style=dashed]; -"718 Unsqueeze_656" -> "719 Concat_657" [label="[1]", style=dashed]; -"719 Concat_657" -> "720 Reshape_658" [label="[2]", style=dashed]; -"720 Reshape_658" -> "723 Gemm_659" [label="[]", style=solid]; -"721 QuantizeLinear_h.2.mlp.c_proj.weight_1" -> "722 DequantizeLinear_h.2.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; -"722 DequantizeLinear_h.2.mlp.c_proj.weight_1" -> "723 Gemm_659" [label="[3072, 768]", style=solid]; -"723 Gemm_659" -> "727 Reshape_663" [label="[]", style=solid]; -"724 Unsqueeze_660" -> "726 Concat_662" [label="[1]", style=dashed]; -"725 Unsqueeze_661" -> "726 Concat_662" [label="[1]", style=dashed]; -"726 Concat_662" -> "727 Reshape_663" [label="[3]", style=dashed]; -"727 Reshape_663" -> "728 Add_664" [label="[]", style=solid]; -"728 Add_664" -> "729 ReduceMean_665" [label="[]", style=solid]; -"728 Add_664" -> "730 Sub_666" [label="[]", style=solid]; -"728 Add_664" -> "892 Add_816" [label="[]", style=solid]; -"729 ReduceMean_665" -> "730 Sub_666" [label="[]", style=solid]; -"730 Sub_666" -> "732 Pow_668" 
[label="[]", style=solid]; -"730 Sub_666" -> "737 Div_673" [label="[]", style=solid]; -"731 Constant_667" -> "732 Pow_668" [label="[]", style=solid]; -"732 Pow_668" -> "733 ReduceMean_669" [label="[]", style=solid]; -"733 ReduceMean_669" -> "735 Add_671" [label="[]", style=solid]; -"734 Constant_670" -> "735 Add_671" [label="[]", style=solid]; -"735 Add_671" -> "736 Sqrt_672" [label="[]", style=solid]; -"736 Sqrt_672" -> "737 Div_673" [label="[]", style=solid]; -"737 Div_673" -> "738 Mul_674" [label="[]", style=solid]; -"738 Mul_674" -> "739 Add_675" [label="[]", style=solid]; -"739 Add_675" -> "740 QuantizeLinear_934_1" [label="[]", style=solid]; -"740 QuantizeLinear_934_1" -> "741 DequantizeLinear_934_1" [label="[]", style=dashed]; -"741 DequantizeLinear_934_1" -> "742 Shape_676" [label="[]", style=solid]; -"741 DequantizeLinear_934_1" -> "745 Shape_679" [label="[]", style=solid]; -"741 DequantizeLinear_934_1" -> "748 Shape_682" [label="[]", style=solid]; -"741 DequantizeLinear_934_1" -> "753 Reshape_687" [label="[]", style=solid]; -"742 Shape_676" -> "744 Gather_678" [label="[-1]", style=dashed]; -"743 Constant_677" -> "744 Gather_678" [label="[]", style=dashed]; -"744 Gather_678" -> "757 Unsqueeze_689" [label="[]", style=dashed]; -"745 Shape_679" -> "747 Gather_681" [label="[-1]", style=dashed]; -"746 Constant_680" -> "747 Gather_681" [label="[]", style=dashed]; -"747 Gather_681" -> "758 Unsqueeze_690" [label="[]", style=dashed]; -"748 Shape_682" -> "750 Gather_684" [label="[-1]", style=dashed]; -"749 Constant_683" -> "750 Gather_684" [label="[]", style=dashed]; -"750 Gather_684" -> "751 Unsqueeze_685" [label="[]", style=dashed]; -"751 Unsqueeze_685" -> "752 Concat_686" [label="[1]", style=dashed]; -"752 Concat_686" -> "753 Reshape_687" [label="[2]", style=dashed]; -"753 Reshape_687" -> "756 Gemm_688" [label="[]", style=solid]; -"754 QuantizeLinear_h.3.attn.c_attn.weight_1" -> "755 DequantizeLinear_h.3.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; -"755 DequantizeLinear_h.3.attn.c_attn.weight_1" -> "756 Gemm_688" [label="[768, 2304]", style=solid]; -"756 Gemm_688" -> "760 Reshape_692" [label="[]", style=solid]; -"757 Unsqueeze_689" -> "759 Concat_691" [label="[1]", style=dashed]; -"758 Unsqueeze_690" -> "759 Concat_691" [label="[1]", style=dashed]; -"759 Concat_691" -> "760 Reshape_692" [label="[3]", style=dashed]; -"760 Reshape_692" -> "761 Split_693" [label="[]", style=solid]; -"761 Split_693" -> "762 QuantizeLinear_query.7_1" [label="[]", style=solid]; -"761 Split_693" -> "764 Shape_694" [label="[]", style=solid]; -"761 Split_693" -> "767 Shape_697" [label="[]", style=solid]; -"761 Split_693" -> "770 Shape_700" [label="[]", style=solid]; -"761 Split_693" -> "783 Shape_713" [label="[]", style=solid]; -"761 Split_693" -> "786 Shape_716" [label="[]", style=solid]; -"761 Split_693" -> "789 Shape_719" [label="[]", style=solid]; -"761 Split_693" -> "800 Reshape_730" [label="[]", style=solid]; -"761 Split_693" -> "804 Shape_732" [label="[]", style=solid]; -"761 Split_693" -> "807 Shape_735" [label="[]", style=solid]; -"761 Split_693" -> "810 Shape_738" [label="[]", style=solid]; -"761 Split_693" -> "821 Reshape_749" [label="[]", style=solid]; -"762 QuantizeLinear_query.7_1" -> "763 DequantizeLinear_query.7_1" [label="[]", style=dashed]; -"763 DequantizeLinear_query.7_1" -> "781 Reshape_711" [label="[]", style=solid]; -"764 Shape_694" -> "766 Gather_696" [label="[-1]", style=dashed]; -"765 Constant_695" -> "766 Gather_696" [label="[]", style=dashed]; -"766 Gather_696" -> "777 
Unsqueeze_707" [label="[]", style=dashed]; -"767 Shape_697" -> "769 Gather_699" [label="[-1]", style=dashed]; -"768 Constant_698" -> "769 Gather_699" [label="[]", style=dashed]; -"769 Gather_699" -> "778 Unsqueeze_708" [label="[]", style=dashed]; -"770 Shape_700" -> "772 Gather_702" [label="[-1]", style=dashed]; -"771 Constant_701" -> "772 Gather_702" [label="[]", style=dashed]; -"772 Gather_702" -> "774 Div_704" [label="[]", style=dashed]; -"773 Constant_703" -> "774 Div_704" [label="[]", style=dashed]; -"774 Div_704" -> "775 Cast_705" [label="[]", style=dashed]; -"775 Cast_705" -> "776 Cast_706" [label="[]", style=dashed]; -"776 Cast_706" -> "779 Unsqueeze_709" [label="[]", style=dashed]; -"777 Unsqueeze_707" -> "780 Concat_710" [label="[1]", style=dashed]; -"778 Unsqueeze_708" -> "780 Concat_710" [label="[1]", style=dashed]; -"779 Unsqueeze_709" -> "780 Concat_710" [label="[1]", style=dashed]; -"780 Concat_710" -> "781 Reshape_711" [label="[4]", style=dashed]; -"781 Reshape_711" -> "782 Transpose_712" [label="[]", style=solid]; -"782 Transpose_712" -> "827 MatMul_755" [label="[]", style=solid]; -"783 Shape_713" -> "785 Gather_715" [label="[-1]", style=dashed]; -"784 Constant_714" -> "785 Gather_715" [label="[]", style=dashed]; -"785 Gather_715" -> "796 Unsqueeze_726" [label="[]", style=dashed]; -"786 Shape_716" -> "788 Gather_718" [label="[-1]", style=dashed]; -"787 Constant_717" -> "788 Gather_718" [label="[]", style=dashed]; -"788 Gather_718" -> "797 Unsqueeze_727" [label="[]", style=dashed]; -"789 Shape_719" -> "791 Gather_721" [label="[-1]", style=dashed]; -"790 Constant_720" -> "791 Gather_721" [label="[]", style=dashed]; -"791 Gather_721" -> "793 Div_723" [label="[]", style=dashed]; -"792 Constant_722" -> "793 Div_723" [label="[]", style=dashed]; -"793 Div_723" -> "794 Cast_724" [label="[]", style=dashed]; -"794 Cast_724" -> "795 Cast_725" [label="[]", style=dashed]; -"795 Cast_725" -> "798 Unsqueeze_728" [label="[]", style=dashed]; -"796 Unsqueeze_726" -> "799 Concat_729" [label="[1]", style=dashed]; -"797 Unsqueeze_727" -> "799 Concat_729" [label="[1]", style=dashed]; -"798 Unsqueeze_728" -> "799 Concat_729" [label="[1]", style=dashed]; -"799 Concat_729" -> "800 Reshape_730" [label="[4]", style=dashed]; -"800 Reshape_730" -> "801 QuantizeLinear_999_1" [label="[]", style=solid]; -"800 Reshape_730" -> "823 Transpose_751" [label="[]", style=solid]; -"801 QuantizeLinear_999_1" -> "802 DequantizeLinear_999_1" [label="[]", style=dashed]; -"802 DequantizeLinear_999_1" -> "803 Transpose_731" [label="[]", style=solid]; -"803 Transpose_731" -> "827 MatMul_755" [label="[]", style=solid]; -"804 Shape_732" -> "806 Gather_734" [label="[-1]", style=dashed]; -"805 Constant_733" -> "806 Gather_734" [label="[]", style=dashed]; -"806 Gather_734" -> "817 Unsqueeze_745" [label="[]", style=dashed]; -"807 Shape_735" -> "809 Gather_737" [label="[-1]", style=dashed]; -"808 Constant_736" -> "809 Gather_737" [label="[]", style=dashed]; -"809 Gather_737" -> "818 Unsqueeze_746" [label="[]", style=dashed]; -"810 Shape_738" -> "812 Gather_740" [label="[-1]", style=dashed]; -"811 Constant_739" -> "812 Gather_740" [label="[]", style=dashed]; -"812 Gather_740" -> "814 Div_742" [label="[]", style=dashed]; -"813 Constant_741" -> "814 Div_742" [label="[]", style=dashed]; -"814 Div_742" -> "815 Cast_743" [label="[]", style=dashed]; -"815 Cast_743" -> "816 Cast_744" [label="[]", style=dashed]; -"816 Cast_744" -> "819 Unsqueeze_747" [label="[]", style=dashed]; -"817 Unsqueeze_745" -> "820 Concat_748" [label="[1]", 
style=dashed]; -"818 Unsqueeze_746" -> "820 Concat_748" [label="[1]", style=dashed]; -"819 Unsqueeze_747" -> "820 Concat_748" [label="[1]", style=dashed]; -"820 Concat_748" -> "821 Reshape_749" [label="[4]", style=dashed]; -"821 Reshape_749" -> "822 Transpose_750" [label="[]", style=solid]; -"822 Transpose_750" -> "825 Unsqueeze_753" [label="[]", style=solid]; -"822 Transpose_750" -> "851 MatMul_779" [label="[]", style=solid]; -"823 Transpose_751" -> "824 Unsqueeze_752" [label="[]", style=solid]; -"824 Unsqueeze_752" -> "826 Concat_754" [label="[]", style=solid]; -"825 Unsqueeze_753" -> "826 Concat_754" [label="[]", style=solid]; -"826 Concat_754" -> "2830 nncf_model_output_4" [label="[2, 1, 12, 8, 64]", style=solid]; -"827 MatMul_755" -> "829 Div_757" [label="[]", style=solid]; -"828 Constant_756" -> "829 Div_757" [label="[]", style=solid]; -"829 Div_757" -> "830 Shape_758" [label="[]", style=solid]; -"829 Div_757" -> "833 Shape_761" [label="[]", style=solid]; -"829 Div_757" -> "844 Mul_772" [label="[]", style=solid]; -"830 Shape_758" -> "832 Gather_760" [label="[-1]", style=dashed]; -"831 Constant_759" -> "832 Gather_760" [label="[]", style=dashed]; -"832 Gather_760" -> "836 Sub_764" [label="[]", style=dashed]; -"833 Shape_761" -> "835 Gather_763" [label="[-1]", style=dashed]; -"834 Constant_762" -> "835 Gather_763" [label="[]", style=dashed]; -"835 Gather_763" -> "836 Sub_764" [label="[]", style=dashed]; -"835 Gather_763" -> "838 Unsqueeze_766" [label="[]", style=dashed]; -"835 Gather_763" -> "841 Unsqueeze_769" [label="[]", style=dashed]; -"836 Sub_764" -> "837 Unsqueeze_765" [label="[]", style=dashed]; -"837 Unsqueeze_765" -> "840 Slice_768" [label="[1]", style=dashed]; -"838 Unsqueeze_766" -> "840 Slice_768" [label="[1]", style=dashed]; -"839 Constant_767" -> "840 Slice_768" [label="[1]", style=dashed]; -"840 Slice_768" -> "843 Slice_771" [label="[]", style=solid]; -"841 Unsqueeze_769" -> "843 Slice_771" [label="[1]", style=dashed]; -"842 Constant_770" -> "843 Slice_771" [label="[1]", style=dashed]; -"843 Slice_771" -> "844 Mul_772" [label="[]", style=solid]; -"843 Slice_771" -> "846 Sub_774" [label="[]", style=solid]; -"844 Mul_772" -> "849 Sub_777" [label="[]", style=solid]; -"845 Constant_773" -> "846 Sub_774" [label="[]", style=solid]; -"846 Sub_774" -> "848 Mul_776" [label="[]", style=solid]; -"847 Constant_775" -> "848 Mul_776" [label="[]", style=solid]; -"848 Mul_776" -> "849 Sub_777" [label="[]", style=solid]; -"849 Sub_777" -> "850 Softmax_778" [label="[]", style=solid]; -"850 Softmax_778" -> "851 MatMul_779" [label="[]", style=solid]; -"851 MatMul_779" -> "852 QuantizeLinear_1056_1" [label="[]", style=solid]; -"852 QuantizeLinear_1056_1" -> "853 DequantizeLinear_1056_1" [label="[]", style=dashed]; -"853 DequantizeLinear_1056_1" -> "854 Transpose_780" [label="[]", style=solid]; -"854 Transpose_780" -> "855 Shape_781" [label="[]", style=solid]; -"854 Transpose_780" -> "858 Shape_784" [label="[]", style=solid]; -"854 Transpose_780" -> "861 Shape_787" [label="[]", style=solid]; -"854 Transpose_780" -> "864 Shape_790" [label="[]", style=solid]; -"854 Transpose_780" -> "872 Reshape_798" [label="[]", style=solid]; -"855 Shape_781" -> "857 Gather_783" [label="[-1]", style=dashed]; -"856 Constant_782" -> "857 Gather_783" [label="[]", style=dashed]; -"857 Gather_783" -> "868 Unsqueeze_794" [label="[]", style=dashed]; -"858 Shape_784" -> "860 Gather_786" [label="[-1]", style=dashed]; -"859 Constant_785" -> "860 Gather_786" [label="[]", style=dashed]; -"860 Gather_786" -> "869 
Unsqueeze_795" [label="[]", style=dashed]; -"861 Shape_787" -> "863 Gather_789" [label="[-1]", style=dashed]; -"862 Constant_788" -> "863 Gather_789" [label="[]", style=dashed]; -"863 Gather_789" -> "867 Mul_793" [label="[]", style=dashed]; -"864 Shape_790" -> "866 Gather_792" [label="[-1]", style=dashed]; -"865 Constant_791" -> "866 Gather_792" [label="[]", style=dashed]; -"866 Gather_792" -> "867 Mul_793" [label="[]", style=dashed]; -"867 Mul_793" -> "870 Unsqueeze_796" [label="[]", style=dashed]; -"868 Unsqueeze_794" -> "871 Concat_797" [label="[1]", style=dashed]; -"869 Unsqueeze_795" -> "871 Concat_797" [label="[1]", style=dashed]; -"870 Unsqueeze_796" -> "871 Concat_797" [label="[1]", style=dashed]; -"871 Concat_797" -> "872 Reshape_798" [label="[3]", style=dashed]; -"872 Reshape_798" -> "873 Shape_799" [label="[]", style=solid]; -"872 Reshape_798" -> "876 Shape_802" [label="[]", style=solid]; -"872 Reshape_798" -> "879 Shape_805" [label="[]", style=solid]; -"872 Reshape_798" -> "884 Reshape_810" [label="[]", style=solid]; -"873 Shape_799" -> "875 Gather_801" [label="[-1]", style=dashed]; -"874 Constant_800" -> "875 Gather_801" [label="[]", style=dashed]; -"875 Gather_801" -> "888 Unsqueeze_812" [label="[]", style=dashed]; -"876 Shape_802" -> "878 Gather_804" [label="[-1]", style=dashed]; -"877 Constant_803" -> "878 Gather_804" [label="[]", style=dashed]; -"878 Gather_804" -> "889 Unsqueeze_813" [label="[]", style=dashed]; -"879 Shape_805" -> "881 Gather_807" [label="[-1]", style=dashed]; -"880 Constant_806" -> "881 Gather_807" [label="[]", style=dashed]; -"881 Gather_807" -> "882 Unsqueeze_808" [label="[]", style=dashed]; -"882 Unsqueeze_808" -> "883 Concat_809" [label="[1]", style=dashed]; -"883 Concat_809" -> "884 Reshape_810" [label="[2]", style=dashed]; -"884 Reshape_810" -> "887 Gemm_811" [label="[]", style=solid]; -"885 QuantizeLinear_h.3.attn.c_proj.weight_1" -> "886 DequantizeLinear_h.3.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; -"886 DequantizeLinear_h.3.attn.c_proj.weight_1" -> "887 Gemm_811" [label="[768, 768]", style=solid]; -"887 Gemm_811" -> "891 Reshape_815" [label="[]", style=solid]; -"888 Unsqueeze_812" -> "890 Concat_814" [label="[1]", style=dashed]; -"889 Unsqueeze_813" -> "890 Concat_814" [label="[1]", style=dashed]; -"890 Concat_814" -> "891 Reshape_815" [label="[3]", style=dashed]; -"891 Reshape_815" -> "892 Add_816" [label="[]", style=solid]; -"892 Add_816" -> "893 ReduceMean_817" [label="[]", style=solid]; -"892 Add_816" -> "894 Sub_818" [label="[]", style=solid]; -"892 Add_816" -> "959 Add_875" [label="[]", style=solid]; -"893 ReduceMean_817" -> "894 Sub_818" [label="[]", style=solid]; -"894 Sub_818" -> "896 Pow_820" [label="[]", style=solid]; -"894 Sub_818" -> "901 Div_825" [label="[]", style=solid]; -"895 Constant_819" -> "896 Pow_820" [label="[]", style=solid]; -"896 Pow_820" -> "897 ReduceMean_821" [label="[]", style=solid]; -"897 ReduceMean_821" -> "899 Add_823" [label="[]", style=solid]; -"898 Constant_822" -> "899 Add_823" [label="[]", style=solid]; -"899 Add_823" -> "900 Sqrt_824" [label="[]", style=solid]; -"900 Sqrt_824" -> "901 Div_825" [label="[]", style=solid]; -"901 Div_825" -> "902 Mul_826" [label="[]", style=solid]; -"902 Mul_826" -> "903 Add_827" [label="[]", style=solid]; -"903 Add_827" -> "904 QuantizeLinear_1108_1" [label="[]", style=solid]; -"904 QuantizeLinear_1108_1" -> "905 DequantizeLinear_1108_1" [label="[]", style=dashed]; -"905 DequantizeLinear_1108_1" -> "906 Shape_828" [label="[]", style=solid]; -"905 
DequantizeLinear_1108_1" -> "909 Shape_831" [label="[]", style=solid]; -"905 DequantizeLinear_1108_1" -> "912 Shape_834" [label="[]", style=solid]; -"905 DequantizeLinear_1108_1" -> "917 Reshape_839" [label="[]", style=solid]; -"906 Shape_828" -> "908 Gather_830" [label="[-1]", style=dashed]; -"907 Constant_829" -> "908 Gather_830" [label="[]", style=dashed]; -"908 Gather_830" -> "921 Unsqueeze_841" [label="[]", style=dashed]; -"909 Shape_831" -> "911 Gather_833" [label="[-1]", style=dashed]; -"910 Constant_832" -> "911 Gather_833" [label="[]", style=dashed]; -"911 Gather_833" -> "922 Unsqueeze_842" [label="[]", style=dashed]; -"912 Shape_834" -> "914 Gather_836" [label="[-1]", style=dashed]; -"913 Constant_835" -> "914 Gather_836" [label="[]", style=dashed]; -"914 Gather_836" -> "915 Unsqueeze_837" [label="[]", style=dashed]; -"915 Unsqueeze_837" -> "916 Concat_838" [label="[1]", style=dashed]; -"916 Concat_838" -> "917 Reshape_839" [label="[2]", style=dashed]; -"917 Reshape_839" -> "920 Gemm_840" [label="[]", style=solid]; -"918 QuantizeLinear_h.3.mlp.c_fc.weight_1" -> "919 DequantizeLinear_h.3.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; -"919 DequantizeLinear_h.3.mlp.c_fc.weight_1" -> "920 Gemm_840" [label="[768, 3072]", style=solid]; -"920 Gemm_840" -> "924 Reshape_844" [label="[]", style=solid]; -"921 Unsqueeze_841" -> "923 Concat_843" [label="[1]", style=dashed]; -"922 Unsqueeze_842" -> "923 Concat_843" [label="[1]", style=dashed]; -"923 Concat_843" -> "924 Reshape_844" [label="[3]", style=dashed]; -"924 Reshape_844" -> "926 Mul_846" [label="[]", style=solid]; -"924 Reshape_844" -> "928 Pow_848" [label="[]", style=solid]; -"924 Reshape_844" -> "931 Add_851" [label="[]", style=solid]; -"925 Constant_845" -> "926 Mul_846" [label="[]", style=solid]; -"926 Mul_846" -> "937 Mul_857" [label="[]", style=solid]; -"927 Constant_847" -> "928 Pow_848" [label="[]", style=solid]; -"928 Pow_848" -> "930 Mul_850" [label="[]", style=solid]; -"929 Constant_849" -> "930 Mul_850" [label="[]", style=solid]; -"930 Mul_850" -> "931 Add_851" [label="[]", style=solid]; -"931 Add_851" -> "933 Mul_853" [label="[]", style=solid]; -"932 Constant_852" -> "933 Mul_853" [label="[]", style=solid]; -"933 Mul_853" -> "934 Tanh_854" [label="[]", style=solid]; -"934 Tanh_854" -> "936 Add_856" [label="[]", style=solid]; -"935 Constant_855" -> "936 Add_856" [label="[]", style=solid]; -"936 Add_856" -> "937 Mul_857" [label="[]", style=solid]; -"937 Mul_857" -> "938 QuantizeLinear_1142_1" [label="[]", style=solid]; -"938 QuantizeLinear_1142_1" -> "939 DequantizeLinear_1142_1" [label="[]", style=dashed]; -"939 DequantizeLinear_1142_1" -> "940 Shape_858" [label="[]", style=solid]; -"939 DequantizeLinear_1142_1" -> "943 Shape_861" [label="[]", style=solid]; -"939 DequantizeLinear_1142_1" -> "946 Shape_864" [label="[]", style=solid]; -"939 DequantizeLinear_1142_1" -> "951 Reshape_869" [label="[]", style=solid]; -"940 Shape_858" -> "942 Gather_860" [label="[-1]", style=dashed]; -"941 Constant_859" -> "942 Gather_860" [label="[]", style=dashed]; -"942 Gather_860" -> "955 Unsqueeze_871" [label="[]", style=dashed]; -"943 Shape_861" -> "945 Gather_863" [label="[-1]", style=dashed]; -"944 Constant_862" -> "945 Gather_863" [label="[]", style=dashed]; -"945 Gather_863" -> "956 Unsqueeze_872" [label="[]", style=dashed]; -"946 Shape_864" -> "948 Gather_866" [label="[-1]", style=dashed]; -"947 Constant_865" -> "948 Gather_866" [label="[]", style=dashed]; -"948 Gather_866" -> "949 Unsqueeze_867" [label="[]", style=dashed]; 
-"949 Unsqueeze_867" -> "950 Concat_868" [label="[1]", style=dashed]; -"950 Concat_868" -> "951 Reshape_869" [label="[2]", style=dashed]; -"951 Reshape_869" -> "954 Gemm_870" [label="[]", style=solid]; -"952 QuantizeLinear_h.3.mlp.c_proj.weight_1" -> "953 DequantizeLinear_h.3.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; -"953 DequantizeLinear_h.3.mlp.c_proj.weight_1" -> "954 Gemm_870" [label="[3072, 768]", style=solid]; -"954 Gemm_870" -> "958 Reshape_874" [label="[]", style=solid]; -"955 Unsqueeze_871" -> "957 Concat_873" [label="[1]", style=dashed]; -"956 Unsqueeze_872" -> "957 Concat_873" [label="[1]", style=dashed]; -"957 Concat_873" -> "958 Reshape_874" [label="[3]", style=dashed]; -"958 Reshape_874" -> "959 Add_875" [label="[]", style=solid]; -"959 Add_875" -> "960 ReduceMean_876" [label="[]", style=solid]; -"959 Add_875" -> "961 Sub_877" [label="[]", style=solid]; -"959 Add_875" -> "1123 Add_1027" [label="[]", style=solid]; -"960 ReduceMean_876" -> "961 Sub_877" [label="[]", style=solid]; -"961 Sub_877" -> "963 Pow_879" [label="[]", style=solid]; -"961 Sub_877" -> "968 Div_884" [label="[]", style=solid]; -"962 Constant_878" -> "963 Pow_879" [label="[]", style=solid]; -"963 Pow_879" -> "964 ReduceMean_880" [label="[]", style=solid]; -"964 ReduceMean_880" -> "966 Add_882" [label="[]", style=solid]; -"965 Constant_881" -> "966 Add_882" [label="[]", style=solid]; -"966 Add_882" -> "967 Sqrt_883" [label="[]", style=solid]; -"967 Sqrt_883" -> "968 Div_884" [label="[]", style=solid]; -"968 Div_884" -> "969 Mul_885" [label="[]", style=solid]; -"969 Mul_885" -> "970 Add_886" [label="[]", style=solid]; -"970 Add_886" -> "971 QuantizeLinear_1175_1" [label="[]", style=solid]; -"971 QuantizeLinear_1175_1" -> "972 DequantizeLinear_1175_1" [label="[]", style=dashed]; -"972 DequantizeLinear_1175_1" -> "973 Shape_887" [label="[]", style=solid]; -"972 DequantizeLinear_1175_1" -> "976 Shape_890" [label="[]", style=solid]; -"972 DequantizeLinear_1175_1" -> "979 Shape_893" [label="[]", style=solid]; -"972 DequantizeLinear_1175_1" -> "984 Reshape_898" [label="[]", style=solid]; -"973 Shape_887" -> "975 Gather_889" [label="[-1]", style=dashed]; -"974 Constant_888" -> "975 Gather_889" [label="[]", style=dashed]; -"975 Gather_889" -> "988 Unsqueeze_900" [label="[]", style=dashed]; -"976 Shape_890" -> "978 Gather_892" [label="[-1]", style=dashed]; -"977 Constant_891" -> "978 Gather_892" [label="[]", style=dashed]; -"978 Gather_892" -> "989 Unsqueeze_901" [label="[]", style=dashed]; -"979 Shape_893" -> "981 Gather_895" [label="[-1]", style=dashed]; -"980 Constant_894" -> "981 Gather_895" [label="[]", style=dashed]; -"981 Gather_895" -> "982 Unsqueeze_896" [label="[]", style=dashed]; -"982 Unsqueeze_896" -> "983 Concat_897" [label="[1]", style=dashed]; -"983 Concat_897" -> "984 Reshape_898" [label="[2]", style=dashed]; -"984 Reshape_898" -> "987 Gemm_899" [label="[]", style=solid]; -"985 QuantizeLinear_h.4.attn.c_attn.weight_1" -> "986 DequantizeLinear_h.4.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; -"986 DequantizeLinear_h.4.attn.c_attn.weight_1" -> "987 Gemm_899" [label="[768, 2304]", style=solid]; -"987 Gemm_899" -> "991 Reshape_903" [label="[]", style=solid]; -"988 Unsqueeze_900" -> "990 Concat_902" [label="[1]", style=dashed]; -"989 Unsqueeze_901" -> "990 Concat_902" [label="[1]", style=dashed]; -"990 Concat_902" -> "991 Reshape_903" [label="[3]", style=dashed]; -"991 Reshape_903" -> "992 Split_904" [label="[]", style=solid]; -"992 Split_904" -> "993 QuantizeLinear_query.9_1" 
[label="[]", style=solid]; -"992 Split_904" -> "995 Shape_905" [label="[]", style=solid]; -"992 Split_904" -> "998 Shape_908" [label="[]", style=solid]; -"992 Split_904" -> "1001 Shape_911" [label="[]", style=solid]; -"992 Split_904" -> "1014 Shape_924" [label="[]", style=solid]; -"992 Split_904" -> "1017 Shape_927" [label="[]", style=solid]; -"992 Split_904" -> "1020 Shape_930" [label="[]", style=solid]; -"992 Split_904" -> "1031 Reshape_941" [label="[]", style=solid]; -"992 Split_904" -> "1035 Shape_943" [label="[]", style=solid]; -"992 Split_904" -> "1038 Shape_946" [label="[]", style=solid]; -"992 Split_904" -> "1041 Shape_949" [label="[]", style=solid]; -"992 Split_904" -> "1052 Reshape_960" [label="[]", style=solid]; -"993 QuantizeLinear_query.9_1" -> "994 DequantizeLinear_query.9_1" [label="[]", style=dashed]; -"994 DequantizeLinear_query.9_1" -> "1012 Reshape_922" [label="[]", style=solid]; -"995 Shape_905" -> "997 Gather_907" [label="[-1]", style=dashed]; -"996 Constant_906" -> "997 Gather_907" [label="[]", style=dashed]; -"997 Gather_907" -> "1008 Unsqueeze_918" [label="[]", style=dashed]; -"998 Shape_908" -> "1000 Gather_910" [label="[-1]", style=dashed]; -"999 Constant_909" -> "1000 Gather_910" [label="[]", style=dashed]; -"1000 Gather_910" -> "1009 Unsqueeze_919" [label="[]", style=dashed]; -"1001 Shape_911" -> "1003 Gather_913" [label="[-1]", style=dashed]; -"1002 Constant_912" -> "1003 Gather_913" [label="[]", style=dashed]; -"1003 Gather_913" -> "1005 Div_915" [label="[]", style=dashed]; -"1004 Constant_914" -> "1005 Div_915" [label="[]", style=dashed]; -"1005 Div_915" -> "1006 Cast_916" [label="[]", style=dashed]; -"1006 Cast_916" -> "1007 Cast_917" [label="[]", style=dashed]; -"1007 Cast_917" -> "1010 Unsqueeze_920" [label="[]", style=dashed]; -"1008 Unsqueeze_918" -> "1011 Concat_921" [label="[1]", style=dashed]; -"1009 Unsqueeze_919" -> "1011 Concat_921" [label="[1]", style=dashed]; -"1010 Unsqueeze_920" -> "1011 Concat_921" [label="[1]", style=dashed]; -"1011 Concat_921" -> "1012 Reshape_922" [label="[4]", style=dashed]; -"1012 Reshape_922" -> "1013 Transpose_923" [label="[]", style=solid]; -"1013 Transpose_923" -> "1058 MatMul_966" [label="[]", style=solid]; -"1014 Shape_924" -> "1016 Gather_926" [label="[-1]", style=dashed]; -"1015 Constant_925" -> "1016 Gather_926" [label="[]", style=dashed]; -"1016 Gather_926" -> "1027 Unsqueeze_937" [label="[]", style=dashed]; -"1017 Shape_927" -> "1019 Gather_929" [label="[-1]", style=dashed]; -"1018 Constant_928" -> "1019 Gather_929" [label="[]", style=dashed]; -"1019 Gather_929" -> "1028 Unsqueeze_938" [label="[]", style=dashed]; -"1020 Shape_930" -> "1022 Gather_932" [label="[-1]", style=dashed]; -"1021 Constant_931" -> "1022 Gather_932" [label="[]", style=dashed]; -"1022 Gather_932" -> "1024 Div_934" [label="[]", style=dashed]; -"1023 Constant_933" -> "1024 Div_934" [label="[]", style=dashed]; -"1024 Div_934" -> "1025 Cast_935" [label="[]", style=dashed]; -"1025 Cast_935" -> "1026 Cast_936" [label="[]", style=dashed]; -"1026 Cast_936" -> "1029 Unsqueeze_939" [label="[]", style=dashed]; -"1027 Unsqueeze_937" -> "1030 Concat_940" [label="[1]", style=dashed]; -"1028 Unsqueeze_938" -> "1030 Concat_940" [label="[1]", style=dashed]; -"1029 Unsqueeze_939" -> "1030 Concat_940" [label="[1]", style=dashed]; -"1030 Concat_940" -> "1031 Reshape_941" [label="[4]", style=dashed]; -"1031 Reshape_941" -> "1032 QuantizeLinear_1240_1" [label="[]", style=solid]; -"1031 Reshape_941" -> "1054 Transpose_962" [label="[]", style=solid]; -"1032 
QuantizeLinear_1240_1" -> "1033 DequantizeLinear_1240_1" [label="[]", style=dashed]; -"1033 DequantizeLinear_1240_1" -> "1034 Transpose_942" [label="[]", style=solid]; -"1034 Transpose_942" -> "1058 MatMul_966" [label="[]", style=solid]; -"1035 Shape_943" -> "1037 Gather_945" [label="[-1]", style=dashed]; -"1036 Constant_944" -> "1037 Gather_945" [label="[]", style=dashed]; -"1037 Gather_945" -> "1048 Unsqueeze_956" [label="[]", style=dashed]; -"1038 Shape_946" -> "1040 Gather_948" [label="[-1]", style=dashed]; -"1039 Constant_947" -> "1040 Gather_948" [label="[]", style=dashed]; -"1040 Gather_948" -> "1049 Unsqueeze_957" [label="[]", style=dashed]; -"1041 Shape_949" -> "1043 Gather_951" [label="[-1]", style=dashed]; -"1042 Constant_950" -> "1043 Gather_951" [label="[]", style=dashed]; -"1043 Gather_951" -> "1045 Div_953" [label="[]", style=dashed]; -"1044 Constant_952" -> "1045 Div_953" [label="[]", style=dashed]; -"1045 Div_953" -> "1046 Cast_954" [label="[]", style=dashed]; -"1046 Cast_954" -> "1047 Cast_955" [label="[]", style=dashed]; -"1047 Cast_955" -> "1050 Unsqueeze_958" [label="[]", style=dashed]; -"1048 Unsqueeze_956" -> "1051 Concat_959" [label="[1]", style=dashed]; -"1049 Unsqueeze_957" -> "1051 Concat_959" [label="[1]", style=dashed]; -"1050 Unsqueeze_958" -> "1051 Concat_959" [label="[1]", style=dashed]; -"1051 Concat_959" -> "1052 Reshape_960" [label="[4]", style=dashed]; -"1052 Reshape_960" -> "1053 Transpose_961" [label="[]", style=solid]; -"1053 Transpose_961" -> "1056 Unsqueeze_964" [label="[]", style=solid]; -"1053 Transpose_961" -> "1082 MatMul_990" [label="[]", style=solid]; -"1054 Transpose_962" -> "1055 Unsqueeze_963" [label="[]", style=solid]; -"1055 Unsqueeze_963" -> "1057 Concat_965" [label="[]", style=solid]; -"1056 Unsqueeze_964" -> "1057 Concat_965" [label="[]", style=solid]; -"1057 Concat_965" -> "2831 nncf_model_output_5" [label="[2, 1, 12, 8, 64]", style=solid]; -"1058 MatMul_966" -> "1060 Div_968" [label="[]", style=solid]; -"1059 Constant_967" -> "1060 Div_968" [label="[]", style=solid]; -"1060 Div_968" -> "1061 Shape_969" [label="[]", style=solid]; -"1060 Div_968" -> "1064 Shape_972" [label="[]", style=solid]; -"1060 Div_968" -> "1075 Mul_983" [label="[]", style=solid]; -"1061 Shape_969" -> "1063 Gather_971" [label="[-1]", style=dashed]; -"1062 Constant_970" -> "1063 Gather_971" [label="[]", style=dashed]; -"1063 Gather_971" -> "1067 Sub_975" [label="[]", style=dashed]; -"1064 Shape_972" -> "1066 Gather_974" [label="[-1]", style=dashed]; -"1065 Constant_973" -> "1066 Gather_974" [label="[]", style=dashed]; -"1066 Gather_974" -> "1067 Sub_975" [label="[]", style=dashed]; -"1066 Gather_974" -> "1069 Unsqueeze_977" [label="[]", style=dashed]; -"1066 Gather_974" -> "1072 Unsqueeze_980" [label="[]", style=dashed]; -"1067 Sub_975" -> "1068 Unsqueeze_976" [label="[]", style=dashed]; -"1068 Unsqueeze_976" -> "1071 Slice_979" [label="[1]", style=dashed]; -"1069 Unsqueeze_977" -> "1071 Slice_979" [label="[1]", style=dashed]; -"1070 Constant_978" -> "1071 Slice_979" [label="[1]", style=dashed]; -"1071 Slice_979" -> "1074 Slice_982" [label="[]", style=solid]; -"1072 Unsqueeze_980" -> "1074 Slice_982" [label="[1]", style=dashed]; -"1073 Constant_981" -> "1074 Slice_982" [label="[1]", style=dashed]; -"1074 Slice_982" -> "1075 Mul_983" [label="[]", style=solid]; -"1074 Slice_982" -> "1077 Sub_985" [label="[]", style=solid]; -"1075 Mul_983" -> "1080 Sub_988" [label="[]", style=solid]; -"1076 Constant_984" -> "1077 Sub_985" [label="[]", style=solid]; -"1077 Sub_985" -> 
"1079 Mul_987" [label="[]", style=solid]; -"1078 Constant_986" -> "1079 Mul_987" [label="[]", style=solid]; -"1079 Mul_987" -> "1080 Sub_988" [label="[]", style=solid]; -"1080 Sub_988" -> "1081 Softmax_989" [label="[]", style=solid]; -"1081 Softmax_989" -> "1082 MatMul_990" [label="[]", style=solid]; -"1082 MatMul_990" -> "1083 QuantizeLinear_1297_1" [label="[]", style=solid]; -"1083 QuantizeLinear_1297_1" -> "1084 DequantizeLinear_1297_1" [label="[]", style=dashed]; -"1084 DequantizeLinear_1297_1" -> "1085 Transpose_991" [label="[]", style=solid]; -"1085 Transpose_991" -> "1086 Shape_992" [label="[]", style=solid]; -"1085 Transpose_991" -> "1089 Shape_995" [label="[]", style=solid]; -"1085 Transpose_991" -> "1092 Shape_998" [label="[]", style=solid]; -"1085 Transpose_991" -> "1095 Shape_1001" [label="[]", style=solid]; -"1085 Transpose_991" -> "1103 Reshape_1009" [label="[]", style=solid]; -"1086 Shape_992" -> "1088 Gather_994" [label="[-1]", style=dashed]; -"1087 Constant_993" -> "1088 Gather_994" [label="[]", style=dashed]; -"1088 Gather_994" -> "1099 Unsqueeze_1005" [label="[]", style=dashed]; -"1089 Shape_995" -> "1091 Gather_997" [label="[-1]", style=dashed]; -"1090 Constant_996" -> "1091 Gather_997" [label="[]", style=dashed]; -"1091 Gather_997" -> "1100 Unsqueeze_1006" [label="[]", style=dashed]; -"1092 Shape_998" -> "1094 Gather_1000" [label="[-1]", style=dashed]; -"1093 Constant_999" -> "1094 Gather_1000" [label="[]", style=dashed]; -"1094 Gather_1000" -> "1098 Mul_1004" [label="[]", style=dashed]; -"1095 Shape_1001" -> "1097 Gather_1003" [label="[-1]", style=dashed]; -"1096 Constant_1002" -> "1097 Gather_1003" [label="[]", style=dashed]; -"1097 Gather_1003" -> "1098 Mul_1004" [label="[]", style=dashed]; -"1098 Mul_1004" -> "1101 Unsqueeze_1007" [label="[]", style=dashed]; -"1099 Unsqueeze_1005" -> "1102 Concat_1008" [label="[1]", style=dashed]; -"1100 Unsqueeze_1006" -> "1102 Concat_1008" [label="[1]", style=dashed]; -"1101 Unsqueeze_1007" -> "1102 Concat_1008" [label="[1]", style=dashed]; -"1102 Concat_1008" -> "1103 Reshape_1009" [label="[3]", style=dashed]; -"1103 Reshape_1009" -> "1104 Shape_1010" [label="[]", style=solid]; -"1103 Reshape_1009" -> "1107 Shape_1013" [label="[]", style=solid]; -"1103 Reshape_1009" -> "1110 Shape_1016" [label="[]", style=solid]; -"1103 Reshape_1009" -> "1115 Reshape_1021" [label="[]", style=solid]; -"1104 Shape_1010" -> "1106 Gather_1012" [label="[-1]", style=dashed]; -"1105 Constant_1011" -> "1106 Gather_1012" [label="[]", style=dashed]; -"1106 Gather_1012" -> "1119 Unsqueeze_1023" [label="[]", style=dashed]; -"1107 Shape_1013" -> "1109 Gather_1015" [label="[-1]", style=dashed]; -"1108 Constant_1014" -> "1109 Gather_1015" [label="[]", style=dashed]; -"1109 Gather_1015" -> "1120 Unsqueeze_1024" [label="[]", style=dashed]; -"1110 Shape_1016" -> "1112 Gather_1018" [label="[-1]", style=dashed]; -"1111 Constant_1017" -> "1112 Gather_1018" [label="[]", style=dashed]; -"1112 Gather_1018" -> "1113 Unsqueeze_1019" [label="[]", style=dashed]; -"1113 Unsqueeze_1019" -> "1114 Concat_1020" [label="[1]", style=dashed]; -"1114 Concat_1020" -> "1115 Reshape_1021" [label="[2]", style=dashed]; -"1115 Reshape_1021" -> "1118 Gemm_1022" [label="[]", style=solid]; -"1116 QuantizeLinear_h.4.attn.c_proj.weight_1" -> "1117 DequantizeLinear_h.4.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; -"1117 DequantizeLinear_h.4.attn.c_proj.weight_1" -> "1118 Gemm_1022" [label="[768, 768]", style=solid]; -"1118 Gemm_1022" -> "1122 Reshape_1026" [label="[]", 
style=solid]; -"1119 Unsqueeze_1023" -> "1121 Concat_1025" [label="[1]", style=dashed]; -"1120 Unsqueeze_1024" -> "1121 Concat_1025" [label="[1]", style=dashed]; -"1121 Concat_1025" -> "1122 Reshape_1026" [label="[3]", style=dashed]; -"1122 Reshape_1026" -> "1123 Add_1027" [label="[]", style=solid]; -"1123 Add_1027" -> "1124 ReduceMean_1028" [label="[]", style=solid]; -"1123 Add_1027" -> "1125 Sub_1029" [label="[]", style=solid]; -"1123 Add_1027" -> "1190 Add_1086" [label="[]", style=solid]; -"1124 ReduceMean_1028" -> "1125 Sub_1029" [label="[]", style=solid]; -"1125 Sub_1029" -> "1127 Pow_1031" [label="[]", style=solid]; -"1125 Sub_1029" -> "1132 Div_1036" [label="[]", style=solid]; -"1126 Constant_1030" -> "1127 Pow_1031" [label="[]", style=solid]; -"1127 Pow_1031" -> "1128 ReduceMean_1032" [label="[]", style=solid]; -"1128 ReduceMean_1032" -> "1130 Add_1034" [label="[]", style=solid]; -"1129 Constant_1033" -> "1130 Add_1034" [label="[]", style=solid]; -"1130 Add_1034" -> "1131 Sqrt_1035" [label="[]", style=solid]; -"1131 Sqrt_1035" -> "1132 Div_1036" [label="[]", style=solid]; -"1132 Div_1036" -> "1133 Mul_1037" [label="[]", style=solid]; -"1133 Mul_1037" -> "1134 Add_1038" [label="[]", style=solid]; -"1134 Add_1038" -> "1135 QuantizeLinear_1349_1" [label="[]", style=solid]; -"1135 QuantizeLinear_1349_1" -> "1136 DequantizeLinear_1349_1" [label="[]", style=dashed]; -"1136 DequantizeLinear_1349_1" -> "1137 Shape_1039" [label="[]", style=solid]; -"1136 DequantizeLinear_1349_1" -> "1140 Shape_1042" [label="[]", style=solid]; -"1136 DequantizeLinear_1349_1" -> "1143 Shape_1045" [label="[]", style=solid]; -"1136 DequantizeLinear_1349_1" -> "1148 Reshape_1050" [label="[]", style=solid]; -"1137 Shape_1039" -> "1139 Gather_1041" [label="[-1]", style=dashed]; -"1138 Constant_1040" -> "1139 Gather_1041" [label="[]", style=dashed]; -"1139 Gather_1041" -> "1152 Unsqueeze_1052" [label="[]", style=dashed]; -"1140 Shape_1042" -> "1142 Gather_1044" [label="[-1]", style=dashed]; -"1141 Constant_1043" -> "1142 Gather_1044" [label="[]", style=dashed]; -"1142 Gather_1044" -> "1153 Unsqueeze_1053" [label="[]", style=dashed]; -"1143 Shape_1045" -> "1145 Gather_1047" [label="[-1]", style=dashed]; -"1144 Constant_1046" -> "1145 Gather_1047" [label="[]", style=dashed]; -"1145 Gather_1047" -> "1146 Unsqueeze_1048" [label="[]", style=dashed]; -"1146 Unsqueeze_1048" -> "1147 Concat_1049" [label="[1]", style=dashed]; -"1147 Concat_1049" -> "1148 Reshape_1050" [label="[2]", style=dashed]; -"1148 Reshape_1050" -> "1151 Gemm_1051" [label="[]", style=solid]; -"1149 QuantizeLinear_h.4.mlp.c_fc.weight_1" -> "1150 DequantizeLinear_h.4.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; -"1150 DequantizeLinear_h.4.mlp.c_fc.weight_1" -> "1151 Gemm_1051" [label="[768, 3072]", style=solid]; -"1151 Gemm_1051" -> "1155 Reshape_1055" [label="[]", style=solid]; -"1152 Unsqueeze_1052" -> "1154 Concat_1054" [label="[1]", style=dashed]; -"1153 Unsqueeze_1053" -> "1154 Concat_1054" [label="[1]", style=dashed]; -"1154 Concat_1054" -> "1155 Reshape_1055" [label="[3]", style=dashed]; -"1155 Reshape_1055" -> "1157 Mul_1057" [label="[]", style=solid]; -"1155 Reshape_1055" -> "1159 Pow_1059" [label="[]", style=solid]; -"1155 Reshape_1055" -> "1162 Add_1062" [label="[]", style=solid]; -"1156 Constant_1056" -> "1157 Mul_1057" [label="[]", style=solid]; -"1157 Mul_1057" -> "1168 Mul_1068" [label="[]", style=solid]; -"1158 Constant_1058" -> "1159 Pow_1059" [label="[]", style=solid]; -"1159 Pow_1059" -> "1161 Mul_1061" [label="[]", 
style=solid]; -"1160 Constant_1060" -> "1161 Mul_1061" [label="[]", style=solid]; -"1161 Mul_1061" -> "1162 Add_1062" [label="[]", style=solid]; -"1162 Add_1062" -> "1164 Mul_1064" [label="[]", style=solid]; -"1163 Constant_1063" -> "1164 Mul_1064" [label="[]", style=solid]; -"1164 Mul_1064" -> "1165 Tanh_1065" [label="[]", style=solid]; -"1165 Tanh_1065" -> "1167 Add_1067" [label="[]", style=solid]; -"1166 Constant_1066" -> "1167 Add_1067" [label="[]", style=solid]; -"1167 Add_1067" -> "1168 Mul_1068" [label="[]", style=solid]; -"1168 Mul_1068" -> "1169 QuantizeLinear_1383_1" [label="[]", style=solid]; -"1169 QuantizeLinear_1383_1" -> "1170 DequantizeLinear_1383_1" [label="[]", style=dashed]; -"1170 DequantizeLinear_1383_1" -> "1171 Shape_1069" [label="[]", style=solid]; -"1170 DequantizeLinear_1383_1" -> "1174 Shape_1072" [label="[]", style=solid]; -"1170 DequantizeLinear_1383_1" -> "1177 Shape_1075" [label="[]", style=solid]; -"1170 DequantizeLinear_1383_1" -> "1182 Reshape_1080" [label="[]", style=solid]; -"1171 Shape_1069" -> "1173 Gather_1071" [label="[-1]", style=dashed]; -"1172 Constant_1070" -> "1173 Gather_1071" [label="[]", style=dashed]; -"1173 Gather_1071" -> "1186 Unsqueeze_1082" [label="[]", style=dashed]; -"1174 Shape_1072" -> "1176 Gather_1074" [label="[-1]", style=dashed]; -"1175 Constant_1073" -> "1176 Gather_1074" [label="[]", style=dashed]; -"1176 Gather_1074" -> "1187 Unsqueeze_1083" [label="[]", style=dashed]; -"1177 Shape_1075" -> "1179 Gather_1077" [label="[-1]", style=dashed]; -"1178 Constant_1076" -> "1179 Gather_1077" [label="[]", style=dashed]; -"1179 Gather_1077" -> "1180 Unsqueeze_1078" [label="[]", style=dashed]; -"1180 Unsqueeze_1078" -> "1181 Concat_1079" [label="[1]", style=dashed]; -"1181 Concat_1079" -> "1182 Reshape_1080" [label="[2]", style=dashed]; -"1182 Reshape_1080" -> "1185 Gemm_1081" [label="[]", style=solid]; -"1183 QuantizeLinear_h.4.mlp.c_proj.weight_1" -> "1184 DequantizeLinear_h.4.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; -"1184 DequantizeLinear_h.4.mlp.c_proj.weight_1" -> "1185 Gemm_1081" [label="[3072, 768]", style=solid]; -"1185 Gemm_1081" -> "1189 Reshape_1085" [label="[]", style=solid]; -"1186 Unsqueeze_1082" -> "1188 Concat_1084" [label="[1]", style=dashed]; -"1187 Unsqueeze_1083" -> "1188 Concat_1084" [label="[1]", style=dashed]; -"1188 Concat_1084" -> "1189 Reshape_1085" [label="[3]", style=dashed]; -"1189 Reshape_1085" -> "1190 Add_1086" [label="[]", style=solid]; -"1190 Add_1086" -> "1191 ReduceMean_1087" [label="[]", style=solid]; -"1190 Add_1086" -> "1192 Sub_1088" [label="[]", style=solid]; -"1190 Add_1086" -> "1354 Add_1238" [label="[]", style=solid]; -"1191 ReduceMean_1087" -> "1192 Sub_1088" [label="[]", style=solid]; -"1192 Sub_1088" -> "1194 Pow_1090" [label="[]", style=solid]; -"1192 Sub_1088" -> "1199 Div_1095" [label="[]", style=solid]; -"1193 Constant_1089" -> "1194 Pow_1090" [label="[]", style=solid]; -"1194 Pow_1090" -> "1195 ReduceMean_1091" [label="[]", style=solid]; -"1195 ReduceMean_1091" -> "1197 Add_1093" [label="[]", style=solid]; -"1196 Constant_1092" -> "1197 Add_1093" [label="[]", style=solid]; -"1197 Add_1093" -> "1198 Sqrt_1094" [label="[]", style=solid]; -"1198 Sqrt_1094" -> "1199 Div_1095" [label="[]", style=solid]; -"1199 Div_1095" -> "1200 Mul_1096" [label="[]", style=solid]; -"1200 Mul_1096" -> "1201 Add_1097" [label="[]", style=solid]; -"1201 Add_1097" -> "1202 QuantizeLinear_1416_1" [label="[]", style=solid]; -"1202 QuantizeLinear_1416_1" -> "1203 DequantizeLinear_1416_1" 
[label="[]", style=dashed]; -"1203 DequantizeLinear_1416_1" -> "1204 Shape_1098" [label="[]", style=solid]; -"1203 DequantizeLinear_1416_1" -> "1207 Shape_1101" [label="[]", style=solid]; -"1203 DequantizeLinear_1416_1" -> "1210 Shape_1104" [label="[]", style=solid]; -"1203 DequantizeLinear_1416_1" -> "1215 Reshape_1109" [label="[]", style=solid]; -"1204 Shape_1098" -> "1206 Gather_1100" [label="[-1]", style=dashed]; -"1205 Constant_1099" -> "1206 Gather_1100" [label="[]", style=dashed]; -"1206 Gather_1100" -> "1219 Unsqueeze_1111" [label="[]", style=dashed]; -"1207 Shape_1101" -> "1209 Gather_1103" [label="[-1]", style=dashed]; -"1208 Constant_1102" -> "1209 Gather_1103" [label="[]", style=dashed]; -"1209 Gather_1103" -> "1220 Unsqueeze_1112" [label="[]", style=dashed]; -"1210 Shape_1104" -> "1212 Gather_1106" [label="[-1]", style=dashed]; -"1211 Constant_1105" -> "1212 Gather_1106" [label="[]", style=dashed]; -"1212 Gather_1106" -> "1213 Unsqueeze_1107" [label="[]", style=dashed]; -"1213 Unsqueeze_1107" -> "1214 Concat_1108" [label="[1]", style=dashed]; -"1214 Concat_1108" -> "1215 Reshape_1109" [label="[2]", style=dashed]; -"1215 Reshape_1109" -> "1218 Gemm_1110" [label="[]", style=solid]; -"1216 QuantizeLinear_h.5.attn.c_attn.weight_1" -> "1217 DequantizeLinear_h.5.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; -"1217 DequantizeLinear_h.5.attn.c_attn.weight_1" -> "1218 Gemm_1110" [label="[768, 2304]", style=solid]; -"1218 Gemm_1110" -> "1222 Reshape_1114" [label="[]", style=solid]; -"1219 Unsqueeze_1111" -> "1221 Concat_1113" [label="[1]", style=dashed]; -"1220 Unsqueeze_1112" -> "1221 Concat_1113" [label="[1]", style=dashed]; -"1221 Concat_1113" -> "1222 Reshape_1114" [label="[3]", style=dashed]; -"1222 Reshape_1114" -> "1223 Split_1115" [label="[]", style=solid]; -"1223 Split_1115" -> "1224 QuantizeLinear_query.11_1" [label="[]", style=solid]; -"1223 Split_1115" -> "1226 Shape_1116" [label="[]", style=solid]; -"1223 Split_1115" -> "1229 Shape_1119" [label="[]", style=solid]; -"1223 Split_1115" -> "1232 Shape_1122" [label="[]", style=solid]; -"1223 Split_1115" -> "1245 Shape_1135" [label="[]", style=solid]; -"1223 Split_1115" -> "1248 Shape_1138" [label="[]", style=solid]; -"1223 Split_1115" -> "1251 Shape_1141" [label="[]", style=solid]; -"1223 Split_1115" -> "1262 Reshape_1152" [label="[]", style=solid]; -"1223 Split_1115" -> "1266 Shape_1154" [label="[]", style=solid]; -"1223 Split_1115" -> "1269 Shape_1157" [label="[]", style=solid]; -"1223 Split_1115" -> "1272 Shape_1160" [label="[]", style=solid]; -"1223 Split_1115" -> "1283 Reshape_1171" [label="[]", style=solid]; -"1224 QuantizeLinear_query.11_1" -> "1225 DequantizeLinear_query.11_1" [label="[]", style=dashed]; -"1225 DequantizeLinear_query.11_1" -> "1243 Reshape_1133" [label="[]", style=solid]; -"1226 Shape_1116" -> "1228 Gather_1118" [label="[-1]", style=dashed]; -"1227 Constant_1117" -> "1228 Gather_1118" [label="[]", style=dashed]; -"1228 Gather_1118" -> "1239 Unsqueeze_1129" [label="[]", style=dashed]; -"1229 Shape_1119" -> "1231 Gather_1121" [label="[-1]", style=dashed]; -"1230 Constant_1120" -> "1231 Gather_1121" [label="[]", style=dashed]; -"1231 Gather_1121" -> "1240 Unsqueeze_1130" [label="[]", style=dashed]; -"1232 Shape_1122" -> "1234 Gather_1124" [label="[-1]", style=dashed]; -"1233 Constant_1123" -> "1234 Gather_1124" [label="[]", style=dashed]; -"1234 Gather_1124" -> "1236 Div_1126" [label="[]", style=dashed]; -"1235 Constant_1125" -> "1236 Div_1126" [label="[]", style=dashed]; -"1236 Div_1126" -> 
"1237 Cast_1127" [label="[]", style=dashed]; -"1237 Cast_1127" -> "1238 Cast_1128" [label="[]", style=dashed]; -"1238 Cast_1128" -> "1241 Unsqueeze_1131" [label="[]", style=dashed]; -"1239 Unsqueeze_1129" -> "1242 Concat_1132" [label="[1]", style=dashed]; -"1240 Unsqueeze_1130" -> "1242 Concat_1132" [label="[1]", style=dashed]; -"1241 Unsqueeze_1131" -> "1242 Concat_1132" [label="[1]", style=dashed]; -"1242 Concat_1132" -> "1243 Reshape_1133" [label="[4]", style=dashed]; -"1243 Reshape_1133" -> "1244 Transpose_1134" [label="[]", style=solid]; -"1244 Transpose_1134" -> "1289 MatMul_1177" [label="[]", style=solid]; -"1245 Shape_1135" -> "1247 Gather_1137" [label="[-1]", style=dashed]; -"1246 Constant_1136" -> "1247 Gather_1137" [label="[]", style=dashed]; -"1247 Gather_1137" -> "1258 Unsqueeze_1148" [label="[]", style=dashed]; -"1248 Shape_1138" -> "1250 Gather_1140" [label="[-1]", style=dashed]; -"1249 Constant_1139" -> "1250 Gather_1140" [label="[]", style=dashed]; -"1250 Gather_1140" -> "1259 Unsqueeze_1149" [label="[]", style=dashed]; -"1251 Shape_1141" -> "1253 Gather_1143" [label="[-1]", style=dashed]; -"1252 Constant_1142" -> "1253 Gather_1143" [label="[]", style=dashed]; -"1253 Gather_1143" -> "1255 Div_1145" [label="[]", style=dashed]; -"1254 Constant_1144" -> "1255 Div_1145" [label="[]", style=dashed]; -"1255 Div_1145" -> "1256 Cast_1146" [label="[]", style=dashed]; -"1256 Cast_1146" -> "1257 Cast_1147" [label="[]", style=dashed]; -"1257 Cast_1147" -> "1260 Unsqueeze_1150" [label="[]", style=dashed]; -"1258 Unsqueeze_1148" -> "1261 Concat_1151" [label="[1]", style=dashed]; -"1259 Unsqueeze_1149" -> "1261 Concat_1151" [label="[1]", style=dashed]; -"1260 Unsqueeze_1150" -> "1261 Concat_1151" [label="[1]", style=dashed]; -"1261 Concat_1151" -> "1262 Reshape_1152" [label="[4]", style=dashed]; -"1262 Reshape_1152" -> "1263 QuantizeLinear_1481_1" [label="[]", style=solid]; -"1262 Reshape_1152" -> "1285 Transpose_1173" [label="[]", style=solid]; -"1263 QuantizeLinear_1481_1" -> "1264 DequantizeLinear_1481_1" [label="[]", style=dashed]; -"1264 DequantizeLinear_1481_1" -> "1265 Transpose_1153" [label="[]", style=solid]; -"1265 Transpose_1153" -> "1289 MatMul_1177" [label="[]", style=solid]; -"1266 Shape_1154" -> "1268 Gather_1156" [label="[-1]", style=dashed]; -"1267 Constant_1155" -> "1268 Gather_1156" [label="[]", style=dashed]; -"1268 Gather_1156" -> "1279 Unsqueeze_1167" [label="[]", style=dashed]; -"1269 Shape_1157" -> "1271 Gather_1159" [label="[-1]", style=dashed]; -"1270 Constant_1158" -> "1271 Gather_1159" [label="[]", style=dashed]; -"1271 Gather_1159" -> "1280 Unsqueeze_1168" [label="[]", style=dashed]; -"1272 Shape_1160" -> "1274 Gather_1162" [label="[-1]", style=dashed]; -"1273 Constant_1161" -> "1274 Gather_1162" [label="[]", style=dashed]; -"1274 Gather_1162" -> "1276 Div_1164" [label="[]", style=dashed]; -"1275 Constant_1163" -> "1276 Div_1164" [label="[]", style=dashed]; -"1276 Div_1164" -> "1277 Cast_1165" [label="[]", style=dashed]; -"1277 Cast_1165" -> "1278 Cast_1166" [label="[]", style=dashed]; -"1278 Cast_1166" -> "1281 Unsqueeze_1169" [label="[]", style=dashed]; -"1279 Unsqueeze_1167" -> "1282 Concat_1170" [label="[1]", style=dashed]; -"1280 Unsqueeze_1168" -> "1282 Concat_1170" [label="[1]", style=dashed]; -"1281 Unsqueeze_1169" -> "1282 Concat_1170" [label="[1]", style=dashed]; -"1282 Concat_1170" -> "1283 Reshape_1171" [label="[4]", style=dashed]; -"1283 Reshape_1171" -> "1284 Transpose_1172" [label="[]", style=solid]; -"1284 Transpose_1172" -> "1287 
Unsqueeze_1175" [label="[]", style=solid]; -"1284 Transpose_1172" -> "1313 MatMul_1201" [label="[]", style=solid]; -"1285 Transpose_1173" -> "1286 Unsqueeze_1174" [label="[]", style=solid]; -"1286 Unsqueeze_1174" -> "1288 Concat_1176" [label="[]", style=solid]; -"1287 Unsqueeze_1175" -> "1288 Concat_1176" [label="[]", style=solid]; -"1288 Concat_1176" -> "2832 nncf_model_output_6" [label="[2, 1, 12, 8, 64]", style=solid]; -"1289 MatMul_1177" -> "1291 Div_1179" [label="[]", style=solid]; -"1290 Constant_1178" -> "1291 Div_1179" [label="[]", style=solid]; -"1291 Div_1179" -> "1292 Shape_1180" [label="[]", style=solid]; -"1291 Div_1179" -> "1295 Shape_1183" [label="[]", style=solid]; -"1291 Div_1179" -> "1306 Mul_1194" [label="[]", style=solid]; -"1292 Shape_1180" -> "1294 Gather_1182" [label="[-1]", style=dashed]; -"1293 Constant_1181" -> "1294 Gather_1182" [label="[]", style=dashed]; -"1294 Gather_1182" -> "1298 Sub_1186" [label="[]", style=dashed]; -"1295 Shape_1183" -> "1297 Gather_1185" [label="[-1]", style=dashed]; -"1296 Constant_1184" -> "1297 Gather_1185" [label="[]", style=dashed]; -"1297 Gather_1185" -> "1298 Sub_1186" [label="[]", style=dashed]; -"1297 Gather_1185" -> "1300 Unsqueeze_1188" [label="[]", style=dashed]; -"1297 Gather_1185" -> "1303 Unsqueeze_1191" [label="[]", style=dashed]; -"1298 Sub_1186" -> "1299 Unsqueeze_1187" [label="[]", style=dashed]; -"1299 Unsqueeze_1187" -> "1302 Slice_1190" [label="[1]", style=dashed]; -"1300 Unsqueeze_1188" -> "1302 Slice_1190" [label="[1]", style=dashed]; -"1301 Constant_1189" -> "1302 Slice_1190" [label="[1]", style=dashed]; -"1302 Slice_1190" -> "1305 Slice_1193" [label="[]", style=solid]; -"1303 Unsqueeze_1191" -> "1305 Slice_1193" [label="[1]", style=dashed]; -"1304 Constant_1192" -> "1305 Slice_1193" [label="[1]", style=dashed]; -"1305 Slice_1193" -> "1306 Mul_1194" [label="[]", style=solid]; -"1305 Slice_1193" -> "1308 Sub_1196" [label="[]", style=solid]; -"1306 Mul_1194" -> "1311 Sub_1199" [label="[]", style=solid]; -"1307 Constant_1195" -> "1308 Sub_1196" [label="[]", style=solid]; -"1308 Sub_1196" -> "1310 Mul_1198" [label="[]", style=solid]; -"1309 Constant_1197" -> "1310 Mul_1198" [label="[]", style=solid]; -"1310 Mul_1198" -> "1311 Sub_1199" [label="[]", style=solid]; -"1311 Sub_1199" -> "1312 Softmax_1200" [label="[]", style=solid]; -"1312 Softmax_1200" -> "1313 MatMul_1201" [label="[]", style=solid]; -"1313 MatMul_1201" -> "1314 QuantizeLinear_1538_1" [label="[]", style=solid]; -"1314 QuantizeLinear_1538_1" -> "1315 DequantizeLinear_1538_1" [label="[]", style=dashed]; -"1315 DequantizeLinear_1538_1" -> "1316 Transpose_1202" [label="[]", style=solid]; -"1316 Transpose_1202" -> "1317 Shape_1203" [label="[]", style=solid]; -"1316 Transpose_1202" -> "1320 Shape_1206" [label="[]", style=solid]; -"1316 Transpose_1202" -> "1323 Shape_1209" [label="[]", style=solid]; -"1316 Transpose_1202" -> "1326 Shape_1212" [label="[]", style=solid]; -"1316 Transpose_1202" -> "1334 Reshape_1220" [label="[]", style=solid]; -"1317 Shape_1203" -> "1319 Gather_1205" [label="[-1]", style=dashed]; -"1318 Constant_1204" -> "1319 Gather_1205" [label="[]", style=dashed]; -"1319 Gather_1205" -> "1330 Unsqueeze_1216" [label="[]", style=dashed]; -"1320 Shape_1206" -> "1322 Gather_1208" [label="[-1]", style=dashed]; -"1321 Constant_1207" -> "1322 Gather_1208" [label="[]", style=dashed]; -"1322 Gather_1208" -> "1331 Unsqueeze_1217" [label="[]", style=dashed]; -"1323 Shape_1209" -> "1325 Gather_1211" [label="[-1]", style=dashed]; -"1324 Constant_1210" -> 
"1325 Gather_1211" [label="[]", style=dashed]; -"1325 Gather_1211" -> "1329 Mul_1215" [label="[]", style=dashed]; -"1326 Shape_1212" -> "1328 Gather_1214" [label="[-1]", style=dashed]; -"1327 Constant_1213" -> "1328 Gather_1214" [label="[]", style=dashed]; -"1328 Gather_1214" -> "1329 Mul_1215" [label="[]", style=dashed]; -"1329 Mul_1215" -> "1332 Unsqueeze_1218" [label="[]", style=dashed]; -"1330 Unsqueeze_1216" -> "1333 Concat_1219" [label="[1]", style=dashed]; -"1331 Unsqueeze_1217" -> "1333 Concat_1219" [label="[1]", style=dashed]; -"1332 Unsqueeze_1218" -> "1333 Concat_1219" [label="[1]", style=dashed]; -"1333 Concat_1219" -> "1334 Reshape_1220" [label="[3]", style=dashed]; -"1334 Reshape_1220" -> "1335 Shape_1221" [label="[]", style=solid]; -"1334 Reshape_1220" -> "1338 Shape_1224" [label="[]", style=solid]; -"1334 Reshape_1220" -> "1341 Shape_1227" [label="[]", style=solid]; -"1334 Reshape_1220" -> "1346 Reshape_1232" [label="[]", style=solid]; -"1335 Shape_1221" -> "1337 Gather_1223" [label="[-1]", style=dashed]; -"1336 Constant_1222" -> "1337 Gather_1223" [label="[]", style=dashed]; -"1337 Gather_1223" -> "1350 Unsqueeze_1234" [label="[]", style=dashed]; -"1338 Shape_1224" -> "1340 Gather_1226" [label="[-1]", style=dashed]; -"1339 Constant_1225" -> "1340 Gather_1226" [label="[]", style=dashed]; -"1340 Gather_1226" -> "1351 Unsqueeze_1235" [label="[]", style=dashed]; -"1341 Shape_1227" -> "1343 Gather_1229" [label="[-1]", style=dashed]; -"1342 Constant_1228" -> "1343 Gather_1229" [label="[]", style=dashed]; -"1343 Gather_1229" -> "1344 Unsqueeze_1230" [label="[]", style=dashed]; -"1344 Unsqueeze_1230" -> "1345 Concat_1231" [label="[1]", style=dashed]; -"1345 Concat_1231" -> "1346 Reshape_1232" [label="[2]", style=dashed]; -"1346 Reshape_1232" -> "1349 Gemm_1233" [label="[]", style=solid]; -"1347 QuantizeLinear_h.5.attn.c_proj.weight_1" -> "1348 DequantizeLinear_h.5.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; -"1348 DequantizeLinear_h.5.attn.c_proj.weight_1" -> "1349 Gemm_1233" [label="[768, 768]", style=solid]; -"1349 Gemm_1233" -> "1353 Reshape_1237" [label="[]", style=solid]; -"1350 Unsqueeze_1234" -> "1352 Concat_1236" [label="[1]", style=dashed]; -"1351 Unsqueeze_1235" -> "1352 Concat_1236" [label="[1]", style=dashed]; -"1352 Concat_1236" -> "1353 Reshape_1237" [label="[3]", style=dashed]; -"1353 Reshape_1237" -> "1354 Add_1238" [label="[]", style=solid]; -"1354 Add_1238" -> "1355 ReduceMean_1239" [label="[]", style=solid]; -"1354 Add_1238" -> "1356 Sub_1240" [label="[]", style=solid]; -"1354 Add_1238" -> "1421 Add_1297" [label="[]", style=solid]; -"1355 ReduceMean_1239" -> "1356 Sub_1240" [label="[]", style=solid]; -"1356 Sub_1240" -> "1358 Pow_1242" [label="[]", style=solid]; -"1356 Sub_1240" -> "1363 Div_1247" [label="[]", style=solid]; -"1357 Constant_1241" -> "1358 Pow_1242" [label="[]", style=solid]; -"1358 Pow_1242" -> "1359 ReduceMean_1243" [label="[]", style=solid]; -"1359 ReduceMean_1243" -> "1361 Add_1245" [label="[]", style=solid]; -"1360 Constant_1244" -> "1361 Add_1245" [label="[]", style=solid]; -"1361 Add_1245" -> "1362 Sqrt_1246" [label="[]", style=solid]; -"1362 Sqrt_1246" -> "1363 Div_1247" [label="[]", style=solid]; -"1363 Div_1247" -> "1364 Mul_1248" [label="[]", style=solid]; -"1364 Mul_1248" -> "1365 Add_1249" [label="[]", style=solid]; -"1365 Add_1249" -> "1366 QuantizeLinear_1590_1" [label="[]", style=solid]; -"1366 QuantizeLinear_1590_1" -> "1367 DequantizeLinear_1590_1" [label="[]", style=dashed]; -"1367 DequantizeLinear_1590_1" -> "1368 
Shape_1250" [label="[]", style=solid]; -"1367 DequantizeLinear_1590_1" -> "1371 Shape_1253" [label="[]", style=solid]; -"1367 DequantizeLinear_1590_1" -> "1374 Shape_1256" [label="[]", style=solid]; -"1367 DequantizeLinear_1590_1" -> "1379 Reshape_1261" [label="[]", style=solid]; -"1368 Shape_1250" -> "1370 Gather_1252" [label="[-1]", style=dashed]; -"1369 Constant_1251" -> "1370 Gather_1252" [label="[]", style=dashed]; -"1370 Gather_1252" -> "1383 Unsqueeze_1263" [label="[]", style=dashed]; -"1371 Shape_1253" -> "1373 Gather_1255" [label="[-1]", style=dashed]; -"1372 Constant_1254" -> "1373 Gather_1255" [label="[]", style=dashed]; -"1373 Gather_1255" -> "1384 Unsqueeze_1264" [label="[]", style=dashed]; -"1374 Shape_1256" -> "1376 Gather_1258" [label="[-1]", style=dashed]; -"1375 Constant_1257" -> "1376 Gather_1258" [label="[]", style=dashed]; -"1376 Gather_1258" -> "1377 Unsqueeze_1259" [label="[]", style=dashed]; -"1377 Unsqueeze_1259" -> "1378 Concat_1260" [label="[1]", style=dashed]; -"1378 Concat_1260" -> "1379 Reshape_1261" [label="[2]", style=dashed]; -"1379 Reshape_1261" -> "1382 Gemm_1262" [label="[]", style=solid]; -"1380 QuantizeLinear_h.5.mlp.c_fc.weight_1" -> "1381 DequantizeLinear_h.5.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; -"1381 DequantizeLinear_h.5.mlp.c_fc.weight_1" -> "1382 Gemm_1262" [label="[768, 3072]", style=solid]; -"1382 Gemm_1262" -> "1386 Reshape_1266" [label="[]", style=solid]; -"1383 Unsqueeze_1263" -> "1385 Concat_1265" [label="[1]", style=dashed]; -"1384 Unsqueeze_1264" -> "1385 Concat_1265" [label="[1]", style=dashed]; -"1385 Concat_1265" -> "1386 Reshape_1266" [label="[3]", style=dashed]; -"1386 Reshape_1266" -> "1388 Mul_1268" [label="[]", style=solid]; -"1386 Reshape_1266" -> "1390 Pow_1270" [label="[]", style=solid]; -"1386 Reshape_1266" -> "1393 Add_1273" [label="[]", style=solid]; -"1387 Constant_1267" -> "1388 Mul_1268" [label="[]", style=solid]; -"1388 Mul_1268" -> "1399 Mul_1279" [label="[]", style=solid]; -"1389 Constant_1269" -> "1390 Pow_1270" [label="[]", style=solid]; -"1390 Pow_1270" -> "1392 Mul_1272" [label="[]", style=solid]; -"1391 Constant_1271" -> "1392 Mul_1272" [label="[]", style=solid]; -"1392 Mul_1272" -> "1393 Add_1273" [label="[]", style=solid]; -"1393 Add_1273" -> "1395 Mul_1275" [label="[]", style=solid]; -"1394 Constant_1274" -> "1395 Mul_1275" [label="[]", style=solid]; -"1395 Mul_1275" -> "1396 Tanh_1276" [label="[]", style=solid]; -"1396 Tanh_1276" -> "1398 Add_1278" [label="[]", style=solid]; -"1397 Constant_1277" -> "1398 Add_1278" [label="[]", style=solid]; -"1398 Add_1278" -> "1399 Mul_1279" [label="[]", style=solid]; -"1399 Mul_1279" -> "1400 QuantizeLinear_1624_1" [label="[]", style=solid]; -"1400 QuantizeLinear_1624_1" -> "1401 DequantizeLinear_1624_1" [label="[]", style=dashed]; -"1401 DequantizeLinear_1624_1" -> "1402 Shape_1280" [label="[]", style=solid]; -"1401 DequantizeLinear_1624_1" -> "1405 Shape_1283" [label="[]", style=solid]; -"1401 DequantizeLinear_1624_1" -> "1408 Shape_1286" [label="[]", style=solid]; -"1401 DequantizeLinear_1624_1" -> "1413 Reshape_1291" [label="[]", style=solid]; -"1402 Shape_1280" -> "1404 Gather_1282" [label="[-1]", style=dashed]; -"1403 Constant_1281" -> "1404 Gather_1282" [label="[]", style=dashed]; -"1404 Gather_1282" -> "1417 Unsqueeze_1293" [label="[]", style=dashed]; -"1405 Shape_1283" -> "1407 Gather_1285" [label="[-1]", style=dashed]; -"1406 Constant_1284" -> "1407 Gather_1285" [label="[]", style=dashed]; -"1407 Gather_1285" -> "1418 Unsqueeze_1294" 
[label="[]", style=dashed]; -"1408 Shape_1286" -> "1410 Gather_1288" [label="[-1]", style=dashed]; -"1409 Constant_1287" -> "1410 Gather_1288" [label="[]", style=dashed]; -"1410 Gather_1288" -> "1411 Unsqueeze_1289" [label="[]", style=dashed]; -"1411 Unsqueeze_1289" -> "1412 Concat_1290" [label="[1]", style=dashed]; -"1412 Concat_1290" -> "1413 Reshape_1291" [label="[2]", style=dashed]; -"1413 Reshape_1291" -> "1416 Gemm_1292" [label="[]", style=solid]; -"1414 QuantizeLinear_h.5.mlp.c_proj.weight_1" -> "1415 DequantizeLinear_h.5.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; -"1415 DequantizeLinear_h.5.mlp.c_proj.weight_1" -> "1416 Gemm_1292" [label="[3072, 768]", style=solid]; -"1416 Gemm_1292" -> "1420 Reshape_1296" [label="[]", style=solid]; -"1417 Unsqueeze_1293" -> "1419 Concat_1295" [label="[1]", style=dashed]; -"1418 Unsqueeze_1294" -> "1419 Concat_1295" [label="[1]", style=dashed]; -"1419 Concat_1295" -> "1420 Reshape_1296" [label="[3]", style=dashed]; -"1420 Reshape_1296" -> "1421 Add_1297" [label="[]", style=solid]; -"1421 Add_1297" -> "1422 ReduceMean_1298" [label="[]", style=solid]; -"1421 Add_1297" -> "1423 Sub_1299" [label="[]", style=solid]; -"1421 Add_1297" -> "1585 Add_1449" [label="[]", style=solid]; -"1422 ReduceMean_1298" -> "1423 Sub_1299" [label="[]", style=solid]; -"1423 Sub_1299" -> "1425 Pow_1301" [label="[]", style=solid]; -"1423 Sub_1299" -> "1430 Div_1306" [label="[]", style=solid]; -"1424 Constant_1300" -> "1425 Pow_1301" [label="[]", style=solid]; -"1425 Pow_1301" -> "1426 ReduceMean_1302" [label="[]", style=solid]; -"1426 ReduceMean_1302" -> "1428 Add_1304" [label="[]", style=solid]; -"1427 Constant_1303" -> "1428 Add_1304" [label="[]", style=solid]; -"1428 Add_1304" -> "1429 Sqrt_1305" [label="[]", style=solid]; -"1429 Sqrt_1305" -> "1430 Div_1306" [label="[]", style=solid]; -"1430 Div_1306" -> "1431 Mul_1307" [label="[]", style=solid]; -"1431 Mul_1307" -> "1432 Add_1308" [label="[]", style=solid]; -"1432 Add_1308" -> "1433 QuantizeLinear_1657_1" [label="[]", style=solid]; -"1433 QuantizeLinear_1657_1" -> "1434 DequantizeLinear_1657_1" [label="[]", style=dashed]; -"1434 DequantizeLinear_1657_1" -> "1435 Shape_1309" [label="[]", style=solid]; -"1434 DequantizeLinear_1657_1" -> "1438 Shape_1312" [label="[]", style=solid]; -"1434 DequantizeLinear_1657_1" -> "1441 Shape_1315" [label="[]", style=solid]; -"1434 DequantizeLinear_1657_1" -> "1446 Reshape_1320" [label="[]", style=solid]; -"1435 Shape_1309" -> "1437 Gather_1311" [label="[-1]", style=dashed]; -"1436 Constant_1310" -> "1437 Gather_1311" [label="[]", style=dashed]; -"1437 Gather_1311" -> "1450 Unsqueeze_1322" [label="[]", style=dashed]; -"1438 Shape_1312" -> "1440 Gather_1314" [label="[-1]", style=dashed]; -"1439 Constant_1313" -> "1440 Gather_1314" [label="[]", style=dashed]; -"1440 Gather_1314" -> "1451 Unsqueeze_1323" [label="[]", style=dashed]; -"1441 Shape_1315" -> "1443 Gather_1317" [label="[-1]", style=dashed]; -"1442 Constant_1316" -> "1443 Gather_1317" [label="[]", style=dashed]; -"1443 Gather_1317" -> "1444 Unsqueeze_1318" [label="[]", style=dashed]; -"1444 Unsqueeze_1318" -> "1445 Concat_1319" [label="[1]", style=dashed]; -"1445 Concat_1319" -> "1446 Reshape_1320" [label="[2]", style=dashed]; -"1446 Reshape_1320" -> "1449 Gemm_1321" [label="[]", style=solid]; -"1447 QuantizeLinear_h.6.attn.c_attn.weight_1" -> "1448 DequantizeLinear_h.6.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; -"1448 DequantizeLinear_h.6.attn.c_attn.weight_1" -> "1449 Gemm_1321" [label="[768, 
2304]", style=solid]; -"1449 Gemm_1321" -> "1453 Reshape_1325" [label="[]", style=solid]; -"1450 Unsqueeze_1322" -> "1452 Concat_1324" [label="[1]", style=dashed]; -"1451 Unsqueeze_1323" -> "1452 Concat_1324" [label="[1]", style=dashed]; -"1452 Concat_1324" -> "1453 Reshape_1325" [label="[3]", style=dashed]; -"1453 Reshape_1325" -> "1454 Split_1326" [label="[]", style=solid]; -"1454 Split_1326" -> "1455 QuantizeLinear_query.13_1" [label="[]", style=solid]; -"1454 Split_1326" -> "1457 Shape_1327" [label="[]", style=solid]; -"1454 Split_1326" -> "1460 Shape_1330" [label="[]", style=solid]; -"1454 Split_1326" -> "1463 Shape_1333" [label="[]", style=solid]; -"1454 Split_1326" -> "1476 Shape_1346" [label="[]", style=solid]; -"1454 Split_1326" -> "1479 Shape_1349" [label="[]", style=solid]; -"1454 Split_1326" -> "1482 Shape_1352" [label="[]", style=solid]; -"1454 Split_1326" -> "1493 Reshape_1363" [label="[]", style=solid]; -"1454 Split_1326" -> "1497 Shape_1365" [label="[]", style=solid]; -"1454 Split_1326" -> "1500 Shape_1368" [label="[]", style=solid]; -"1454 Split_1326" -> "1503 Shape_1371" [label="[]", style=solid]; -"1454 Split_1326" -> "1514 Reshape_1382" [label="[]", style=solid]; -"1455 QuantizeLinear_query.13_1" -> "1456 DequantizeLinear_query.13_1" [label="[]", style=dashed]; -"1456 DequantizeLinear_query.13_1" -> "1474 Reshape_1344" [label="[]", style=solid]; -"1457 Shape_1327" -> "1459 Gather_1329" [label="[-1]", style=dashed]; -"1458 Constant_1328" -> "1459 Gather_1329" [label="[]", style=dashed]; -"1459 Gather_1329" -> "1470 Unsqueeze_1340" [label="[]", style=dashed]; -"1460 Shape_1330" -> "1462 Gather_1332" [label="[-1]", style=dashed]; -"1461 Constant_1331" -> "1462 Gather_1332" [label="[]", style=dashed]; -"1462 Gather_1332" -> "1471 Unsqueeze_1341" [label="[]", style=dashed]; -"1463 Shape_1333" -> "1465 Gather_1335" [label="[-1]", style=dashed]; -"1464 Constant_1334" -> "1465 Gather_1335" [label="[]", style=dashed]; -"1465 Gather_1335" -> "1467 Div_1337" [label="[]", style=dashed]; -"1466 Constant_1336" -> "1467 Div_1337" [label="[]", style=dashed]; -"1467 Div_1337" -> "1468 Cast_1338" [label="[]", style=dashed]; -"1468 Cast_1338" -> "1469 Cast_1339" [label="[]", style=dashed]; -"1469 Cast_1339" -> "1472 Unsqueeze_1342" [label="[]", style=dashed]; -"1470 Unsqueeze_1340" -> "1473 Concat_1343" [label="[1]", style=dashed]; -"1471 Unsqueeze_1341" -> "1473 Concat_1343" [label="[1]", style=dashed]; -"1472 Unsqueeze_1342" -> "1473 Concat_1343" [label="[1]", style=dashed]; -"1473 Concat_1343" -> "1474 Reshape_1344" [label="[4]", style=dashed]; -"1474 Reshape_1344" -> "1475 Transpose_1345" [label="[]", style=solid]; -"1475 Transpose_1345" -> "1520 MatMul_1388" [label="[]", style=solid]; -"1476 Shape_1346" -> "1478 Gather_1348" [label="[-1]", style=dashed]; -"1477 Constant_1347" -> "1478 Gather_1348" [label="[]", style=dashed]; -"1478 Gather_1348" -> "1489 Unsqueeze_1359" [label="[]", style=dashed]; -"1479 Shape_1349" -> "1481 Gather_1351" [label="[-1]", style=dashed]; -"1480 Constant_1350" -> "1481 Gather_1351" [label="[]", style=dashed]; -"1481 Gather_1351" -> "1490 Unsqueeze_1360" [label="[]", style=dashed]; -"1482 Shape_1352" -> "1484 Gather_1354" [label="[-1]", style=dashed]; -"1483 Constant_1353" -> "1484 Gather_1354" [label="[]", style=dashed]; -"1484 Gather_1354" -> "1486 Div_1356" [label="[]", style=dashed]; -"1485 Constant_1355" -> "1486 Div_1356" [label="[]", style=dashed]; -"1486 Div_1356" -> "1487 Cast_1357" [label="[]", style=dashed]; -"1487 Cast_1357" -> "1488 Cast_1358" 
[label="[]", style=dashed]; -"1488 Cast_1358" -> "1491 Unsqueeze_1361" [label="[]", style=dashed]; -"1489 Unsqueeze_1359" -> "1492 Concat_1362" [label="[1]", style=dashed]; -"1490 Unsqueeze_1360" -> "1492 Concat_1362" [label="[1]", style=dashed]; -"1491 Unsqueeze_1361" -> "1492 Concat_1362" [label="[1]", style=dashed]; -"1492 Concat_1362" -> "1493 Reshape_1363" [label="[4]", style=dashed]; -"1493 Reshape_1363" -> "1494 QuantizeLinear_1722_1" [label="[]", style=solid]; -"1493 Reshape_1363" -> "1516 Transpose_1384" [label="[]", style=solid]; -"1494 QuantizeLinear_1722_1" -> "1495 DequantizeLinear_1722_1" [label="[]", style=dashed]; -"1495 DequantizeLinear_1722_1" -> "1496 Transpose_1364" [label="[]", style=solid]; -"1496 Transpose_1364" -> "1520 MatMul_1388" [label="[]", style=solid]; -"1497 Shape_1365" -> "1499 Gather_1367" [label="[-1]", style=dashed]; -"1498 Constant_1366" -> "1499 Gather_1367" [label="[]", style=dashed]; -"1499 Gather_1367" -> "1510 Unsqueeze_1378" [label="[]", style=dashed]; -"1500 Shape_1368" -> "1502 Gather_1370" [label="[-1]", style=dashed]; -"1501 Constant_1369" -> "1502 Gather_1370" [label="[]", style=dashed]; -"1502 Gather_1370" -> "1511 Unsqueeze_1379" [label="[]", style=dashed]; -"1503 Shape_1371" -> "1505 Gather_1373" [label="[-1]", style=dashed]; -"1504 Constant_1372" -> "1505 Gather_1373" [label="[]", style=dashed]; -"1505 Gather_1373" -> "1507 Div_1375" [label="[]", style=dashed]; -"1506 Constant_1374" -> "1507 Div_1375" [label="[]", style=dashed]; -"1507 Div_1375" -> "1508 Cast_1376" [label="[]", style=dashed]; -"1508 Cast_1376" -> "1509 Cast_1377" [label="[]", style=dashed]; -"1509 Cast_1377" -> "1512 Unsqueeze_1380" [label="[]", style=dashed]; -"1510 Unsqueeze_1378" -> "1513 Concat_1381" [label="[1]", style=dashed]; -"1511 Unsqueeze_1379" -> "1513 Concat_1381" [label="[1]", style=dashed]; -"1512 Unsqueeze_1380" -> "1513 Concat_1381" [label="[1]", style=dashed]; -"1513 Concat_1381" -> "1514 Reshape_1382" [label="[4]", style=dashed]; -"1514 Reshape_1382" -> "1515 Transpose_1383" [label="[]", style=solid]; -"1515 Transpose_1383" -> "1518 Unsqueeze_1386" [label="[]", style=solid]; -"1515 Transpose_1383" -> "1544 MatMul_1412" [label="[]", style=solid]; -"1516 Transpose_1384" -> "1517 Unsqueeze_1385" [label="[]", style=solid]; -"1517 Unsqueeze_1385" -> "1519 Concat_1387" [label="[]", style=solid]; -"1518 Unsqueeze_1386" -> "1519 Concat_1387" [label="[]", style=solid]; -"1519 Concat_1387" -> "2833 nncf_model_output_7" [label="[2, 1, 12, 8, 64]", style=solid]; -"1520 MatMul_1388" -> "1522 Div_1390" [label="[]", style=solid]; -"1521 Constant_1389" -> "1522 Div_1390" [label="[]", style=solid]; -"1522 Div_1390" -> "1523 Shape_1391" [label="[]", style=solid]; -"1522 Div_1390" -> "1526 Shape_1394" [label="[]", style=solid]; -"1522 Div_1390" -> "1537 Mul_1405" [label="[]", style=solid]; -"1523 Shape_1391" -> "1525 Gather_1393" [label="[-1]", style=dashed]; -"1524 Constant_1392" -> "1525 Gather_1393" [label="[]", style=dashed]; -"1525 Gather_1393" -> "1529 Sub_1397" [label="[]", style=dashed]; -"1526 Shape_1394" -> "1528 Gather_1396" [label="[-1]", style=dashed]; -"1527 Constant_1395" -> "1528 Gather_1396" [label="[]", style=dashed]; -"1528 Gather_1396" -> "1529 Sub_1397" [label="[]", style=dashed]; -"1528 Gather_1396" -> "1531 Unsqueeze_1399" [label="[]", style=dashed]; -"1528 Gather_1396" -> "1534 Unsqueeze_1402" [label="[]", style=dashed]; -"1529 Sub_1397" -> "1530 Unsqueeze_1398" [label="[]", style=dashed]; -"1530 Unsqueeze_1398" -> "1533 Slice_1401" [label="[1]", 
style=dashed]; -"1531 Unsqueeze_1399" -> "1533 Slice_1401" [label="[1]", style=dashed]; -"1532 Constant_1400" -> "1533 Slice_1401" [label="[1]", style=dashed]; -"1533 Slice_1401" -> "1536 Slice_1404" [label="[]", style=solid]; -"1534 Unsqueeze_1402" -> "1536 Slice_1404" [label="[1]", style=dashed]; -"1535 Constant_1403" -> "1536 Slice_1404" [label="[1]", style=dashed]; -"1536 Slice_1404" -> "1537 Mul_1405" [label="[]", style=solid]; -"1536 Slice_1404" -> "1539 Sub_1407" [label="[]", style=solid]; -"1537 Mul_1405" -> "1542 Sub_1410" [label="[]", style=solid]; -"1538 Constant_1406" -> "1539 Sub_1407" [label="[]", style=solid]; -"1539 Sub_1407" -> "1541 Mul_1409" [label="[]", style=solid]; -"1540 Constant_1408" -> "1541 Mul_1409" [label="[]", style=solid]; -"1541 Mul_1409" -> "1542 Sub_1410" [label="[]", style=solid]; -"1542 Sub_1410" -> "1543 Softmax_1411" [label="[]", style=solid]; -"1543 Softmax_1411" -> "1544 MatMul_1412" [label="[]", style=solid]; -"1544 MatMul_1412" -> "1545 QuantizeLinear_1779_1" [label="[]", style=solid]; -"1545 QuantizeLinear_1779_1" -> "1546 DequantizeLinear_1779_1" [label="[]", style=dashed]; -"1546 DequantizeLinear_1779_1" -> "1547 Transpose_1413" [label="[]", style=solid]; -"1547 Transpose_1413" -> "1548 Shape_1414" [label="[]", style=solid]; -"1547 Transpose_1413" -> "1551 Shape_1417" [label="[]", style=solid]; -"1547 Transpose_1413" -> "1554 Shape_1420" [label="[]", style=solid]; -"1547 Transpose_1413" -> "1557 Shape_1423" [label="[]", style=solid]; -"1547 Transpose_1413" -> "1565 Reshape_1431" [label="[]", style=solid]; -"1548 Shape_1414" -> "1550 Gather_1416" [label="[-1]", style=dashed]; -"1549 Constant_1415" -> "1550 Gather_1416" [label="[]", style=dashed]; -"1550 Gather_1416" -> "1561 Unsqueeze_1427" [label="[]", style=dashed]; -"1551 Shape_1417" -> "1553 Gather_1419" [label="[-1]", style=dashed]; -"1552 Constant_1418" -> "1553 Gather_1419" [label="[]", style=dashed]; -"1553 Gather_1419" -> "1562 Unsqueeze_1428" [label="[]", style=dashed]; -"1554 Shape_1420" -> "1556 Gather_1422" [label="[-1]", style=dashed]; -"1555 Constant_1421" -> "1556 Gather_1422" [label="[]", style=dashed]; -"1556 Gather_1422" -> "1560 Mul_1426" [label="[]", style=dashed]; -"1557 Shape_1423" -> "1559 Gather_1425" [label="[-1]", style=dashed]; -"1558 Constant_1424" -> "1559 Gather_1425" [label="[]", style=dashed]; -"1559 Gather_1425" -> "1560 Mul_1426" [label="[]", style=dashed]; -"1560 Mul_1426" -> "1563 Unsqueeze_1429" [label="[]", style=dashed]; -"1561 Unsqueeze_1427" -> "1564 Concat_1430" [label="[1]", style=dashed]; -"1562 Unsqueeze_1428" -> "1564 Concat_1430" [label="[1]", style=dashed]; -"1563 Unsqueeze_1429" -> "1564 Concat_1430" [label="[1]", style=dashed]; -"1564 Concat_1430" -> "1565 Reshape_1431" [label="[3]", style=dashed]; -"1565 Reshape_1431" -> "1566 Shape_1432" [label="[]", style=solid]; -"1565 Reshape_1431" -> "1569 Shape_1435" [label="[]", style=solid]; -"1565 Reshape_1431" -> "1572 Shape_1438" [label="[]", style=solid]; -"1565 Reshape_1431" -> "1577 Reshape_1443" [label="[]", style=solid]; -"1566 Shape_1432" -> "1568 Gather_1434" [label="[-1]", style=dashed]; -"1567 Constant_1433" -> "1568 Gather_1434" [label="[]", style=dashed]; -"1568 Gather_1434" -> "1581 Unsqueeze_1445" [label="[]", style=dashed]; -"1569 Shape_1435" -> "1571 Gather_1437" [label="[-1]", style=dashed]; -"1570 Constant_1436" -> "1571 Gather_1437" [label="[]", style=dashed]; -"1571 Gather_1437" -> "1582 Unsqueeze_1446" [label="[]", style=dashed]; -"1572 Shape_1438" -> "1574 Gather_1440" 
[label="[-1]", style=dashed]; -"1573 Constant_1439" -> "1574 Gather_1440" [label="[]", style=dashed]; -"1574 Gather_1440" -> "1575 Unsqueeze_1441" [label="[]", style=dashed]; -"1575 Unsqueeze_1441" -> "1576 Concat_1442" [label="[1]", style=dashed]; -"1576 Concat_1442" -> "1577 Reshape_1443" [label="[2]", style=dashed]; -"1577 Reshape_1443" -> "1580 Gemm_1444" [label="[]", style=solid]; -"1578 QuantizeLinear_h.6.attn.c_proj.weight_1" -> "1579 DequantizeLinear_h.6.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; -"1579 DequantizeLinear_h.6.attn.c_proj.weight_1" -> "1580 Gemm_1444" [label="[768, 768]", style=solid]; -"1580 Gemm_1444" -> "1584 Reshape_1448" [label="[]", style=solid]; -"1581 Unsqueeze_1445" -> "1583 Concat_1447" [label="[1]", style=dashed]; -"1582 Unsqueeze_1446" -> "1583 Concat_1447" [label="[1]", style=dashed]; -"1583 Concat_1447" -> "1584 Reshape_1448" [label="[3]", style=dashed]; -"1584 Reshape_1448" -> "1585 Add_1449" [label="[]", style=solid]; -"1585 Add_1449" -> "1586 ReduceMean_1450" [label="[]", style=solid]; -"1585 Add_1449" -> "1587 Sub_1451" [label="[]", style=solid]; -"1585 Add_1449" -> "1652 Add_1508" [label="[]", style=solid]; -"1586 ReduceMean_1450" -> "1587 Sub_1451" [label="[]", style=solid]; -"1587 Sub_1451" -> "1589 Pow_1453" [label="[]", style=solid]; -"1587 Sub_1451" -> "1594 Div_1458" [label="[]", style=solid]; -"1588 Constant_1452" -> "1589 Pow_1453" [label="[]", style=solid]; -"1589 Pow_1453" -> "1590 ReduceMean_1454" [label="[]", style=solid]; -"1590 ReduceMean_1454" -> "1592 Add_1456" [label="[]", style=solid]; -"1591 Constant_1455" -> "1592 Add_1456" [label="[]", style=solid]; -"1592 Add_1456" -> "1593 Sqrt_1457" [label="[]", style=solid]; -"1593 Sqrt_1457" -> "1594 Div_1458" [label="[]", style=solid]; -"1594 Div_1458" -> "1595 Mul_1459" [label="[]", style=solid]; -"1595 Mul_1459" -> "1596 Add_1460" [label="[]", style=solid]; -"1596 Add_1460" -> "1597 QuantizeLinear_1831_1" [label="[]", style=solid]; -"1597 QuantizeLinear_1831_1" -> "1598 DequantizeLinear_1831_1" [label="[]", style=dashed]; -"1598 DequantizeLinear_1831_1" -> "1599 Shape_1461" [label="[]", style=solid]; -"1598 DequantizeLinear_1831_1" -> "1602 Shape_1464" [label="[]", style=solid]; -"1598 DequantizeLinear_1831_1" -> "1605 Shape_1467" [label="[]", style=solid]; -"1598 DequantizeLinear_1831_1" -> "1610 Reshape_1472" [label="[]", style=solid]; -"1599 Shape_1461" -> "1601 Gather_1463" [label="[-1]", style=dashed]; -"1600 Constant_1462" -> "1601 Gather_1463" [label="[]", style=dashed]; -"1601 Gather_1463" -> "1614 Unsqueeze_1474" [label="[]", style=dashed]; -"1602 Shape_1464" -> "1604 Gather_1466" [label="[-1]", style=dashed]; -"1603 Constant_1465" -> "1604 Gather_1466" [label="[]", style=dashed]; -"1604 Gather_1466" -> "1615 Unsqueeze_1475" [label="[]", style=dashed]; -"1605 Shape_1467" -> "1607 Gather_1469" [label="[-1]", style=dashed]; -"1606 Constant_1468" -> "1607 Gather_1469" [label="[]", style=dashed]; -"1607 Gather_1469" -> "1608 Unsqueeze_1470" [label="[]", style=dashed]; -"1608 Unsqueeze_1470" -> "1609 Concat_1471" [label="[1]", style=dashed]; -"1609 Concat_1471" -> "1610 Reshape_1472" [label="[2]", style=dashed]; -"1610 Reshape_1472" -> "1613 Gemm_1473" [label="[]", style=solid]; -"1611 QuantizeLinear_h.6.mlp.c_fc.weight_1" -> "1612 DequantizeLinear_h.6.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; -"1612 DequantizeLinear_h.6.mlp.c_fc.weight_1" -> "1613 Gemm_1473" [label="[768, 3072]", style=solid]; -"1613 Gemm_1473" -> "1617 Reshape_1477" [label="[]", 
style=solid]; -"1614 Unsqueeze_1474" -> "1616 Concat_1476" [label="[1]", style=dashed]; -"1615 Unsqueeze_1475" -> "1616 Concat_1476" [label="[1]", style=dashed]; -"1616 Concat_1476" -> "1617 Reshape_1477" [label="[3]", style=dashed]; -"1617 Reshape_1477" -> "1619 Mul_1479" [label="[]", style=solid]; -"1617 Reshape_1477" -> "1621 Pow_1481" [label="[]", style=solid]; -"1617 Reshape_1477" -> "1624 Add_1484" [label="[]", style=solid]; -"1618 Constant_1478" -> "1619 Mul_1479" [label="[]", style=solid]; -"1619 Mul_1479" -> "1630 Mul_1490" [label="[]", style=solid]; -"1620 Constant_1480" -> "1621 Pow_1481" [label="[]", style=solid]; -"1621 Pow_1481" -> "1623 Mul_1483" [label="[]", style=solid]; -"1622 Constant_1482" -> "1623 Mul_1483" [label="[]", style=solid]; -"1623 Mul_1483" -> "1624 Add_1484" [label="[]", style=solid]; -"1624 Add_1484" -> "1626 Mul_1486" [label="[]", style=solid]; -"1625 Constant_1485" -> "1626 Mul_1486" [label="[]", style=solid]; -"1626 Mul_1486" -> "1627 Tanh_1487" [label="[]", style=solid]; -"1627 Tanh_1487" -> "1629 Add_1489" [label="[]", style=solid]; -"1628 Constant_1488" -> "1629 Add_1489" [label="[]", style=solid]; -"1629 Add_1489" -> "1630 Mul_1490" [label="[]", style=solid]; -"1630 Mul_1490" -> "1631 QuantizeLinear_1865_1" [label="[]", style=solid]; -"1631 QuantizeLinear_1865_1" -> "1632 DequantizeLinear_1865_1" [label="[]", style=dashed]; -"1632 DequantizeLinear_1865_1" -> "1633 Shape_1491" [label="[]", style=solid]; -"1632 DequantizeLinear_1865_1" -> "1636 Shape_1494" [label="[]", style=solid]; -"1632 DequantizeLinear_1865_1" -> "1639 Shape_1497" [label="[]", style=solid]; -"1632 DequantizeLinear_1865_1" -> "1644 Reshape_1502" [label="[]", style=solid]; -"1633 Shape_1491" -> "1635 Gather_1493" [label="[-1]", style=dashed]; -"1634 Constant_1492" -> "1635 Gather_1493" [label="[]", style=dashed]; -"1635 Gather_1493" -> "1648 Unsqueeze_1504" [label="[]", style=dashed]; -"1636 Shape_1494" -> "1638 Gather_1496" [label="[-1]", style=dashed]; -"1637 Constant_1495" -> "1638 Gather_1496" [label="[]", style=dashed]; -"1638 Gather_1496" -> "1649 Unsqueeze_1505" [label="[]", style=dashed]; -"1639 Shape_1497" -> "1641 Gather_1499" [label="[-1]", style=dashed]; -"1640 Constant_1498" -> "1641 Gather_1499" [label="[]", style=dashed]; -"1641 Gather_1499" -> "1642 Unsqueeze_1500" [label="[]", style=dashed]; -"1642 Unsqueeze_1500" -> "1643 Concat_1501" [label="[1]", style=dashed]; -"1643 Concat_1501" -> "1644 Reshape_1502" [label="[2]", style=dashed]; -"1644 Reshape_1502" -> "1647 Gemm_1503" [label="[]", style=solid]; -"1645 QuantizeLinear_h.6.mlp.c_proj.weight_1" -> "1646 DequantizeLinear_h.6.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; -"1646 DequantizeLinear_h.6.mlp.c_proj.weight_1" -> "1647 Gemm_1503" [label="[3072, 768]", style=solid]; -"1647 Gemm_1503" -> "1651 Reshape_1507" [label="[]", style=solid]; -"1648 Unsqueeze_1504" -> "1650 Concat_1506" [label="[1]", style=dashed]; -"1649 Unsqueeze_1505" -> "1650 Concat_1506" [label="[1]", style=dashed]; -"1650 Concat_1506" -> "1651 Reshape_1507" [label="[3]", style=dashed]; -"1651 Reshape_1507" -> "1652 Add_1508" [label="[]", style=solid]; -"1652 Add_1508" -> "1653 ReduceMean_1509" [label="[]", style=solid]; -"1652 Add_1508" -> "1654 Sub_1510" [label="[]", style=solid]; -"1652 Add_1508" -> "1816 Add_1660" [label="[]", style=solid]; -"1653 ReduceMean_1509" -> "1654 Sub_1510" [label="[]", style=solid]; -"1654 Sub_1510" -> "1656 Pow_1512" [label="[]", style=solid]; -"1654 Sub_1510" -> "1661 Div_1517" [label="[]", 
style=solid]; -"1655 Constant_1511" -> "1656 Pow_1512" [label="[]", style=solid]; -"1656 Pow_1512" -> "1657 ReduceMean_1513" [label="[]", style=solid]; -"1657 ReduceMean_1513" -> "1659 Add_1515" [label="[]", style=solid]; -"1658 Constant_1514" -> "1659 Add_1515" [label="[]", style=solid]; -"1659 Add_1515" -> "1660 Sqrt_1516" [label="[]", style=solid]; -"1660 Sqrt_1516" -> "1661 Div_1517" [label="[]", style=solid]; -"1661 Div_1517" -> "1662 Mul_1518" [label="[]", style=solid]; -"1662 Mul_1518" -> "1663 Add_1519" [label="[]", style=solid]; -"1663 Add_1519" -> "1664 QuantizeLinear_1898_1" [label="[]", style=solid]; -"1664 QuantizeLinear_1898_1" -> "1665 DequantizeLinear_1898_1" [label="[]", style=dashed]; -"1665 DequantizeLinear_1898_1" -> "1666 Shape_1520" [label="[]", style=solid]; -"1665 DequantizeLinear_1898_1" -> "1669 Shape_1523" [label="[]", style=solid]; -"1665 DequantizeLinear_1898_1" -> "1672 Shape_1526" [label="[]", style=solid]; -"1665 DequantizeLinear_1898_1" -> "1677 Reshape_1531" [label="[]", style=solid]; -"1666 Shape_1520" -> "1668 Gather_1522" [label="[-1]", style=dashed]; -"1667 Constant_1521" -> "1668 Gather_1522" [label="[]", style=dashed]; -"1668 Gather_1522" -> "1681 Unsqueeze_1533" [label="[]", style=dashed]; -"1669 Shape_1523" -> "1671 Gather_1525" [label="[-1]", style=dashed]; -"1670 Constant_1524" -> "1671 Gather_1525" [label="[]", style=dashed]; -"1671 Gather_1525" -> "1682 Unsqueeze_1534" [label="[]", style=dashed]; -"1672 Shape_1526" -> "1674 Gather_1528" [label="[-1]", style=dashed]; -"1673 Constant_1527" -> "1674 Gather_1528" [label="[]", style=dashed]; -"1674 Gather_1528" -> "1675 Unsqueeze_1529" [label="[]", style=dashed]; -"1675 Unsqueeze_1529" -> "1676 Concat_1530" [label="[1]", style=dashed]; -"1676 Concat_1530" -> "1677 Reshape_1531" [label="[2]", style=dashed]; -"1677 Reshape_1531" -> "1680 Gemm_1532" [label="[]", style=solid]; -"1678 QuantizeLinear_h.7.attn.c_attn.weight_1" -> "1679 DequantizeLinear_h.7.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; -"1679 DequantizeLinear_h.7.attn.c_attn.weight_1" -> "1680 Gemm_1532" [label="[768, 2304]", style=solid]; -"1680 Gemm_1532" -> "1684 Reshape_1536" [label="[]", style=solid]; -"1681 Unsqueeze_1533" -> "1683 Concat_1535" [label="[1]", style=dashed]; -"1682 Unsqueeze_1534" -> "1683 Concat_1535" [label="[1]", style=dashed]; -"1683 Concat_1535" -> "1684 Reshape_1536" [label="[3]", style=dashed]; -"1684 Reshape_1536" -> "1685 Split_1537" [label="[]", style=solid]; -"1685 Split_1537" -> "1686 QuantizeLinear_query.15_1" [label="[]", style=solid]; -"1685 Split_1537" -> "1688 Shape_1538" [label="[]", style=solid]; -"1685 Split_1537" -> "1691 Shape_1541" [label="[]", style=solid]; -"1685 Split_1537" -> "1694 Shape_1544" [label="[]", style=solid]; -"1685 Split_1537" -> "1707 Shape_1557" [label="[]", style=solid]; -"1685 Split_1537" -> "1710 Shape_1560" [label="[]", style=solid]; -"1685 Split_1537" -> "1713 Shape_1563" [label="[]", style=solid]; -"1685 Split_1537" -> "1724 Reshape_1574" [label="[]", style=solid]; -"1685 Split_1537" -> "1728 Shape_1576" [label="[]", style=solid]; -"1685 Split_1537" -> "1731 Shape_1579" [label="[]", style=solid]; -"1685 Split_1537" -> "1734 Shape_1582" [label="[]", style=solid]; -"1685 Split_1537" -> "1745 Reshape_1593" [label="[]", style=solid]; -"1686 QuantizeLinear_query.15_1" -> "1687 DequantizeLinear_query.15_1" [label="[]", style=dashed]; -"1687 DequantizeLinear_query.15_1" -> "1705 Reshape_1555" [label="[]", style=solid]; -"1688 Shape_1538" -> "1690 Gather_1540" 
[label="[-1]", style=dashed]; -"1689 Constant_1539" -> "1690 Gather_1540" [label="[]", style=dashed]; -"1690 Gather_1540" -> "1701 Unsqueeze_1551" [label="[]", style=dashed]; -"1691 Shape_1541" -> "1693 Gather_1543" [label="[-1]", style=dashed]; -"1692 Constant_1542" -> "1693 Gather_1543" [label="[]", style=dashed]; -"1693 Gather_1543" -> "1702 Unsqueeze_1552" [label="[]", style=dashed]; -"1694 Shape_1544" -> "1696 Gather_1546" [label="[-1]", style=dashed]; -"1695 Constant_1545" -> "1696 Gather_1546" [label="[]", style=dashed]; -"1696 Gather_1546" -> "1698 Div_1548" [label="[]", style=dashed]; -"1697 Constant_1547" -> "1698 Div_1548" [label="[]", style=dashed]; -"1698 Div_1548" -> "1699 Cast_1549" [label="[]", style=dashed]; -"1699 Cast_1549" -> "1700 Cast_1550" [label="[]", style=dashed]; -"1700 Cast_1550" -> "1703 Unsqueeze_1553" [label="[]", style=dashed]; -"1701 Unsqueeze_1551" -> "1704 Concat_1554" [label="[1]", style=dashed]; -"1702 Unsqueeze_1552" -> "1704 Concat_1554" [label="[1]", style=dashed]; -"1703 Unsqueeze_1553" -> "1704 Concat_1554" [label="[1]", style=dashed]; -"1704 Concat_1554" -> "1705 Reshape_1555" [label="[4]", style=dashed]; -"1705 Reshape_1555" -> "1706 Transpose_1556" [label="[]", style=solid]; -"1706 Transpose_1556" -> "1751 MatMul_1599" [label="[]", style=solid]; -"1707 Shape_1557" -> "1709 Gather_1559" [label="[-1]", style=dashed]; -"1708 Constant_1558" -> "1709 Gather_1559" [label="[]", style=dashed]; -"1709 Gather_1559" -> "1720 Unsqueeze_1570" [label="[]", style=dashed]; -"1710 Shape_1560" -> "1712 Gather_1562" [label="[-1]", style=dashed]; -"1711 Constant_1561" -> "1712 Gather_1562" [label="[]", style=dashed]; -"1712 Gather_1562" -> "1721 Unsqueeze_1571" [label="[]", style=dashed]; -"1713 Shape_1563" -> "1715 Gather_1565" [label="[-1]", style=dashed]; -"1714 Constant_1564" -> "1715 Gather_1565" [label="[]", style=dashed]; -"1715 Gather_1565" -> "1717 Div_1567" [label="[]", style=dashed]; -"1716 Constant_1566" -> "1717 Div_1567" [label="[]", style=dashed]; -"1717 Div_1567" -> "1718 Cast_1568" [label="[]", style=dashed]; -"1718 Cast_1568" -> "1719 Cast_1569" [label="[]", style=dashed]; -"1719 Cast_1569" -> "1722 Unsqueeze_1572" [label="[]", style=dashed]; -"1720 Unsqueeze_1570" -> "1723 Concat_1573" [label="[1]", style=dashed]; -"1721 Unsqueeze_1571" -> "1723 Concat_1573" [label="[1]", style=dashed]; -"1722 Unsqueeze_1572" -> "1723 Concat_1573" [label="[1]", style=dashed]; -"1723 Concat_1573" -> "1724 Reshape_1574" [label="[4]", style=dashed]; -"1724 Reshape_1574" -> "1725 QuantizeLinear_1963_1" [label="[]", style=solid]; -"1724 Reshape_1574" -> "1747 Transpose_1595" [label="[]", style=solid]; -"1725 QuantizeLinear_1963_1" -> "1726 DequantizeLinear_1963_1" [label="[]", style=dashed]; -"1726 DequantizeLinear_1963_1" -> "1727 Transpose_1575" [label="[]", style=solid]; -"1727 Transpose_1575" -> "1751 MatMul_1599" [label="[]", style=solid]; -"1728 Shape_1576" -> "1730 Gather_1578" [label="[-1]", style=dashed]; -"1729 Constant_1577" -> "1730 Gather_1578" [label="[]", style=dashed]; -"1730 Gather_1578" -> "1741 Unsqueeze_1589" [label="[]", style=dashed]; -"1731 Shape_1579" -> "1733 Gather_1581" [label="[-1]", style=dashed]; -"1732 Constant_1580" -> "1733 Gather_1581" [label="[]", style=dashed]; -"1733 Gather_1581" -> "1742 Unsqueeze_1590" [label="[]", style=dashed]; -"1734 Shape_1582" -> "1736 Gather_1584" [label="[-1]", style=dashed]; -"1735 Constant_1583" -> "1736 Gather_1584" [label="[]", style=dashed]; -"1736 Gather_1584" -> "1738 Div_1586" [label="[]", 
style=dashed]; -"1737 Constant_1585" -> "1738 Div_1586" [label="[]", style=dashed]; -"1738 Div_1586" -> "1739 Cast_1587" [label="[]", style=dashed]; -"1739 Cast_1587" -> "1740 Cast_1588" [label="[]", style=dashed]; -"1740 Cast_1588" -> "1743 Unsqueeze_1591" [label="[]", style=dashed]; -"1741 Unsqueeze_1589" -> "1744 Concat_1592" [label="[1]", style=dashed]; -"1742 Unsqueeze_1590" -> "1744 Concat_1592" [label="[1]", style=dashed]; -"1743 Unsqueeze_1591" -> "1744 Concat_1592" [label="[1]", style=dashed]; -"1744 Concat_1592" -> "1745 Reshape_1593" [label="[4]", style=dashed]; -"1745 Reshape_1593" -> "1746 Transpose_1594" [label="[]", style=solid]; -"1746 Transpose_1594" -> "1749 Unsqueeze_1597" [label="[]", style=solid]; -"1746 Transpose_1594" -> "1775 MatMul_1623" [label="[]", style=solid]; -"1747 Transpose_1595" -> "1748 Unsqueeze_1596" [label="[]", style=solid]; -"1748 Unsqueeze_1596" -> "1750 Concat_1598" [label="[]", style=solid]; -"1749 Unsqueeze_1597" -> "1750 Concat_1598" [label="[]", style=solid]; -"1750 Concat_1598" -> "2834 nncf_model_output_8" [label="[2, 1, 12, 8, 64]", style=solid]; -"1751 MatMul_1599" -> "1753 Div_1601" [label="[]", style=solid]; -"1752 Constant_1600" -> "1753 Div_1601" [label="[]", style=solid]; -"1753 Div_1601" -> "1754 Shape_1602" [label="[]", style=solid]; -"1753 Div_1601" -> "1757 Shape_1605" [label="[]", style=solid]; -"1753 Div_1601" -> "1768 Mul_1616" [label="[]", style=solid]; -"1754 Shape_1602" -> "1756 Gather_1604" [label="[-1]", style=dashed]; -"1755 Constant_1603" -> "1756 Gather_1604" [label="[]", style=dashed]; -"1756 Gather_1604" -> "1760 Sub_1608" [label="[]", style=dashed]; -"1757 Shape_1605" -> "1759 Gather_1607" [label="[-1]", style=dashed]; -"1758 Constant_1606" -> "1759 Gather_1607" [label="[]", style=dashed]; -"1759 Gather_1607" -> "1760 Sub_1608" [label="[]", style=dashed]; -"1759 Gather_1607" -> "1762 Unsqueeze_1610" [label="[]", style=dashed]; -"1759 Gather_1607" -> "1765 Unsqueeze_1613" [label="[]", style=dashed]; -"1760 Sub_1608" -> "1761 Unsqueeze_1609" [label="[]", style=dashed]; -"1761 Unsqueeze_1609" -> "1764 Slice_1612" [label="[1]", style=dashed]; -"1762 Unsqueeze_1610" -> "1764 Slice_1612" [label="[1]", style=dashed]; -"1763 Constant_1611" -> "1764 Slice_1612" [label="[1]", style=dashed]; -"1764 Slice_1612" -> "1767 Slice_1615" [label="[]", style=solid]; -"1765 Unsqueeze_1613" -> "1767 Slice_1615" [label="[1]", style=dashed]; -"1766 Constant_1614" -> "1767 Slice_1615" [label="[1]", style=dashed]; -"1767 Slice_1615" -> "1768 Mul_1616" [label="[]", style=solid]; -"1767 Slice_1615" -> "1770 Sub_1618" [label="[]", style=solid]; -"1768 Mul_1616" -> "1773 Sub_1621" [label="[]", style=solid]; -"1769 Constant_1617" -> "1770 Sub_1618" [label="[]", style=solid]; -"1770 Sub_1618" -> "1772 Mul_1620" [label="[]", style=solid]; -"1771 Constant_1619" -> "1772 Mul_1620" [label="[]", style=solid]; -"1772 Mul_1620" -> "1773 Sub_1621" [label="[]", style=solid]; -"1773 Sub_1621" -> "1774 Softmax_1622" [label="[]", style=solid]; -"1774 Softmax_1622" -> "1775 MatMul_1623" [label="[]", style=solid]; -"1775 MatMul_1623" -> "1776 QuantizeLinear_2020_1" [label="[]", style=solid]; -"1776 QuantizeLinear_2020_1" -> "1777 DequantizeLinear_2020_1" [label="[]", style=dashed]; -"1777 DequantizeLinear_2020_1" -> "1778 Transpose_1624" [label="[]", style=solid]; -"1778 Transpose_1624" -> "1779 Shape_1625" [label="[]", style=solid]; -"1778 Transpose_1624" -> "1782 Shape_1628" [label="[]", style=solid]; -"1778 Transpose_1624" -> "1785 Shape_1631" [label="[]", 
style=solid]; -"1778 Transpose_1624" -> "1788 Shape_1634" [label="[]", style=solid]; -"1778 Transpose_1624" -> "1796 Reshape_1642" [label="[]", style=solid]; -"1779 Shape_1625" -> "1781 Gather_1627" [label="[-1]", style=dashed]; -"1780 Constant_1626" -> "1781 Gather_1627" [label="[]", style=dashed]; -"1781 Gather_1627" -> "1792 Unsqueeze_1638" [label="[]", style=dashed]; -"1782 Shape_1628" -> "1784 Gather_1630" [label="[-1]", style=dashed]; -"1783 Constant_1629" -> "1784 Gather_1630" [label="[]", style=dashed]; -"1784 Gather_1630" -> "1793 Unsqueeze_1639" [label="[]", style=dashed]; -"1785 Shape_1631" -> "1787 Gather_1633" [label="[-1]", style=dashed]; -"1786 Constant_1632" -> "1787 Gather_1633" [label="[]", style=dashed]; -"1787 Gather_1633" -> "1791 Mul_1637" [label="[]", style=dashed]; -"1788 Shape_1634" -> "1790 Gather_1636" [label="[-1]", style=dashed]; -"1789 Constant_1635" -> "1790 Gather_1636" [label="[]", style=dashed]; -"1790 Gather_1636" -> "1791 Mul_1637" [label="[]", style=dashed]; -"1791 Mul_1637" -> "1794 Unsqueeze_1640" [label="[]", style=dashed]; -"1792 Unsqueeze_1638" -> "1795 Concat_1641" [label="[1]", style=dashed]; -"1793 Unsqueeze_1639" -> "1795 Concat_1641" [label="[1]", style=dashed]; -"1794 Unsqueeze_1640" -> "1795 Concat_1641" [label="[1]", style=dashed]; -"1795 Concat_1641" -> "1796 Reshape_1642" [label="[3]", style=dashed]; -"1796 Reshape_1642" -> "1797 Shape_1643" [label="[]", style=solid]; -"1796 Reshape_1642" -> "1800 Shape_1646" [label="[]", style=solid]; -"1796 Reshape_1642" -> "1803 Shape_1649" [label="[]", style=solid]; -"1796 Reshape_1642" -> "1808 Reshape_1654" [label="[]", style=solid]; -"1797 Shape_1643" -> "1799 Gather_1645" [label="[-1]", style=dashed]; -"1798 Constant_1644" -> "1799 Gather_1645" [label="[]", style=dashed]; -"1799 Gather_1645" -> "1812 Unsqueeze_1656" [label="[]", style=dashed]; -"1800 Shape_1646" -> "1802 Gather_1648" [label="[-1]", style=dashed]; -"1801 Constant_1647" -> "1802 Gather_1648" [label="[]", style=dashed]; -"1802 Gather_1648" -> "1813 Unsqueeze_1657" [label="[]", style=dashed]; -"1803 Shape_1649" -> "1805 Gather_1651" [label="[-1]", style=dashed]; -"1804 Constant_1650" -> "1805 Gather_1651" [label="[]", style=dashed]; -"1805 Gather_1651" -> "1806 Unsqueeze_1652" [label="[]", style=dashed]; -"1806 Unsqueeze_1652" -> "1807 Concat_1653" [label="[1]", style=dashed]; -"1807 Concat_1653" -> "1808 Reshape_1654" [label="[2]", style=dashed]; -"1808 Reshape_1654" -> "1811 Gemm_1655" [label="[]", style=solid]; -"1809 QuantizeLinear_h.7.attn.c_proj.weight_1" -> "1810 DequantizeLinear_h.7.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; -"1810 DequantizeLinear_h.7.attn.c_proj.weight_1" -> "1811 Gemm_1655" [label="[768, 768]", style=solid]; -"1811 Gemm_1655" -> "1815 Reshape_1659" [label="[]", style=solid]; -"1812 Unsqueeze_1656" -> "1814 Concat_1658" [label="[1]", style=dashed]; -"1813 Unsqueeze_1657" -> "1814 Concat_1658" [label="[1]", style=dashed]; -"1814 Concat_1658" -> "1815 Reshape_1659" [label="[3]", style=dashed]; -"1815 Reshape_1659" -> "1816 Add_1660" [label="[]", style=solid]; -"1816 Add_1660" -> "1817 ReduceMean_1661" [label="[]", style=solid]; -"1816 Add_1660" -> "1818 Sub_1662" [label="[]", style=solid]; -"1816 Add_1660" -> "1883 Add_1719" [label="[]", style=solid]; -"1817 ReduceMean_1661" -> "1818 Sub_1662" [label="[]", style=solid]; -"1818 Sub_1662" -> "1820 Pow_1664" [label="[]", style=solid]; -"1818 Sub_1662" -> "1825 Div_1669" [label="[]", style=solid]; -"1819 Constant_1663" -> "1820 Pow_1664" 
[label="[]", style=solid]; -"1820 Pow_1664" -> "1821 ReduceMean_1665" [label="[]", style=solid]; -"1821 ReduceMean_1665" -> "1823 Add_1667" [label="[]", style=solid]; -"1822 Constant_1666" -> "1823 Add_1667" [label="[]", style=solid]; -"1823 Add_1667" -> "1824 Sqrt_1668" [label="[]", style=solid]; -"1824 Sqrt_1668" -> "1825 Div_1669" [label="[]", style=solid]; -"1825 Div_1669" -> "1826 Mul_1670" [label="[]", style=solid]; -"1826 Mul_1670" -> "1827 Add_1671" [label="[]", style=solid]; -"1827 Add_1671" -> "1828 QuantizeLinear_2072_1" [label="[]", style=solid]; -"1828 QuantizeLinear_2072_1" -> "1829 DequantizeLinear_2072_1" [label="[]", style=dashed]; -"1829 DequantizeLinear_2072_1" -> "1830 Shape_1672" [label="[]", style=solid]; -"1829 DequantizeLinear_2072_1" -> "1833 Shape_1675" [label="[]", style=solid]; -"1829 DequantizeLinear_2072_1" -> "1836 Shape_1678" [label="[]", style=solid]; -"1829 DequantizeLinear_2072_1" -> "1841 Reshape_1683" [label="[]", style=solid]; -"1830 Shape_1672" -> "1832 Gather_1674" [label="[-1]", style=dashed]; -"1831 Constant_1673" -> "1832 Gather_1674" [label="[]", style=dashed]; -"1832 Gather_1674" -> "1845 Unsqueeze_1685" [label="[]", style=dashed]; -"1833 Shape_1675" -> "1835 Gather_1677" [label="[-1]", style=dashed]; -"1834 Constant_1676" -> "1835 Gather_1677" [label="[]", style=dashed]; -"1835 Gather_1677" -> "1846 Unsqueeze_1686" [label="[]", style=dashed]; -"1836 Shape_1678" -> "1838 Gather_1680" [label="[-1]", style=dashed]; -"1837 Constant_1679" -> "1838 Gather_1680" [label="[]", style=dashed]; -"1838 Gather_1680" -> "1839 Unsqueeze_1681" [label="[]", style=dashed]; -"1839 Unsqueeze_1681" -> "1840 Concat_1682" [label="[1]", style=dashed]; -"1840 Concat_1682" -> "1841 Reshape_1683" [label="[2]", style=dashed]; -"1841 Reshape_1683" -> "1844 Gemm_1684" [label="[]", style=solid]; -"1842 QuantizeLinear_h.7.mlp.c_fc.weight_1" -> "1843 DequantizeLinear_h.7.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; -"1843 DequantizeLinear_h.7.mlp.c_fc.weight_1" -> "1844 Gemm_1684" [label="[768, 3072]", style=solid]; -"1844 Gemm_1684" -> "1848 Reshape_1688" [label="[]", style=solid]; -"1845 Unsqueeze_1685" -> "1847 Concat_1687" [label="[1]", style=dashed]; -"1846 Unsqueeze_1686" -> "1847 Concat_1687" [label="[1]", style=dashed]; -"1847 Concat_1687" -> "1848 Reshape_1688" [label="[3]", style=dashed]; -"1848 Reshape_1688" -> "1850 Mul_1690" [label="[]", style=solid]; -"1848 Reshape_1688" -> "1852 Pow_1692" [label="[]", style=solid]; -"1848 Reshape_1688" -> "1855 Add_1695" [label="[]", style=solid]; -"1849 Constant_1689" -> "1850 Mul_1690" [label="[]", style=solid]; -"1850 Mul_1690" -> "1861 Mul_1701" [label="[]", style=solid]; -"1851 Constant_1691" -> "1852 Pow_1692" [label="[]", style=solid]; -"1852 Pow_1692" -> "1854 Mul_1694" [label="[]", style=solid]; -"1853 Constant_1693" -> "1854 Mul_1694" [label="[]", style=solid]; -"1854 Mul_1694" -> "1855 Add_1695" [label="[]", style=solid]; -"1855 Add_1695" -> "1857 Mul_1697" [label="[]", style=solid]; -"1856 Constant_1696" -> "1857 Mul_1697" [label="[]", style=solid]; -"1857 Mul_1697" -> "1858 Tanh_1698" [label="[]", style=solid]; -"1858 Tanh_1698" -> "1860 Add_1700" [label="[]", style=solid]; -"1859 Constant_1699" -> "1860 Add_1700" [label="[]", style=solid]; -"1860 Add_1700" -> "1861 Mul_1701" [label="[]", style=solid]; -"1861 Mul_1701" -> "1862 QuantizeLinear_2106_1" [label="[]", style=solid]; -"1862 QuantizeLinear_2106_1" -> "1863 DequantizeLinear_2106_1" [label="[]", style=dashed]; -"1863 DequantizeLinear_2106_1" -> 
"1864 Shape_1702" [label="[]", style=solid]; -"1863 DequantizeLinear_2106_1" -> "1867 Shape_1705" [label="[]", style=solid]; -"1863 DequantizeLinear_2106_1" -> "1870 Shape_1708" [label="[]", style=solid]; -"1863 DequantizeLinear_2106_1" -> "1875 Reshape_1713" [label="[]", style=solid]; -"1864 Shape_1702" -> "1866 Gather_1704" [label="[-1]", style=dashed]; -"1865 Constant_1703" -> "1866 Gather_1704" [label="[]", style=dashed]; -"1866 Gather_1704" -> "1879 Unsqueeze_1715" [label="[]", style=dashed]; -"1867 Shape_1705" -> "1869 Gather_1707" [label="[-1]", style=dashed]; -"1868 Constant_1706" -> "1869 Gather_1707" [label="[]", style=dashed]; -"1869 Gather_1707" -> "1880 Unsqueeze_1716" [label="[]", style=dashed]; -"1870 Shape_1708" -> "1872 Gather_1710" [label="[-1]", style=dashed]; -"1871 Constant_1709" -> "1872 Gather_1710" [label="[]", style=dashed]; -"1872 Gather_1710" -> "1873 Unsqueeze_1711" [label="[]", style=dashed]; -"1873 Unsqueeze_1711" -> "1874 Concat_1712" [label="[1]", style=dashed]; -"1874 Concat_1712" -> "1875 Reshape_1713" [label="[2]", style=dashed]; -"1875 Reshape_1713" -> "1878 Gemm_1714" [label="[]", style=solid]; -"1876 QuantizeLinear_h.7.mlp.c_proj.weight_1" -> "1877 DequantizeLinear_h.7.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; -"1877 DequantizeLinear_h.7.mlp.c_proj.weight_1" -> "1878 Gemm_1714" [label="[3072, 768]", style=solid]; -"1878 Gemm_1714" -> "1882 Reshape_1718" [label="[]", style=solid]; -"1879 Unsqueeze_1715" -> "1881 Concat_1717" [label="[1]", style=dashed]; -"1880 Unsqueeze_1716" -> "1881 Concat_1717" [label="[1]", style=dashed]; -"1881 Concat_1717" -> "1882 Reshape_1718" [label="[3]", style=dashed]; -"1882 Reshape_1718" -> "1883 Add_1719" [label="[]", style=solid]; -"1883 Add_1719" -> "1884 ReduceMean_1720" [label="[]", style=solid]; -"1883 Add_1719" -> "1885 Sub_1721" [label="[]", style=solid]; -"1883 Add_1719" -> "2047 Add_1871" [label="[]", style=solid]; -"1884 ReduceMean_1720" -> "1885 Sub_1721" [label="[]", style=solid]; -"1885 Sub_1721" -> "1887 Pow_1723" [label="[]", style=solid]; -"1885 Sub_1721" -> "1892 Div_1728" [label="[]", style=solid]; -"1886 Constant_1722" -> "1887 Pow_1723" [label="[]", style=solid]; -"1887 Pow_1723" -> "1888 ReduceMean_1724" [label="[]", style=solid]; -"1888 ReduceMean_1724" -> "1890 Add_1726" [label="[]", style=solid]; -"1889 Constant_1725" -> "1890 Add_1726" [label="[]", style=solid]; -"1890 Add_1726" -> "1891 Sqrt_1727" [label="[]", style=solid]; -"1891 Sqrt_1727" -> "1892 Div_1728" [label="[]", style=solid]; -"1892 Div_1728" -> "1893 Mul_1729" [label="[]", style=solid]; -"1893 Mul_1729" -> "1894 Add_1730" [label="[]", style=solid]; -"1894 Add_1730" -> "1895 QuantizeLinear_2139_1" [label="[]", style=solid]; -"1895 QuantizeLinear_2139_1" -> "1896 DequantizeLinear_2139_1" [label="[]", style=dashed]; -"1896 DequantizeLinear_2139_1" -> "1897 Shape_1731" [label="[]", style=solid]; -"1896 DequantizeLinear_2139_1" -> "1900 Shape_1734" [label="[]", style=solid]; -"1896 DequantizeLinear_2139_1" -> "1903 Shape_1737" [label="[]", style=solid]; -"1896 DequantizeLinear_2139_1" -> "1908 Reshape_1742" [label="[]", style=solid]; -"1897 Shape_1731" -> "1899 Gather_1733" [label="[-1]", style=dashed]; -"1898 Constant_1732" -> "1899 Gather_1733" [label="[]", style=dashed]; -"1899 Gather_1733" -> "1912 Unsqueeze_1744" [label="[]", style=dashed]; -"1900 Shape_1734" -> "1902 Gather_1736" [label="[-1]", style=dashed]; -"1901 Constant_1735" -> "1902 Gather_1736" [label="[]", style=dashed]; -"1902 Gather_1736" -> "1913 
Unsqueeze_1745" [label="[]", style=dashed]; -"1903 Shape_1737" -> "1905 Gather_1739" [label="[-1]", style=dashed]; -"1904 Constant_1738" -> "1905 Gather_1739" [label="[]", style=dashed]; -"1905 Gather_1739" -> "1906 Unsqueeze_1740" [label="[]", style=dashed]; -"1906 Unsqueeze_1740" -> "1907 Concat_1741" [label="[1]", style=dashed]; -"1907 Concat_1741" -> "1908 Reshape_1742" [label="[2]", style=dashed]; -"1908 Reshape_1742" -> "1911 Gemm_1743" [label="[]", style=solid]; -"1909 QuantizeLinear_h.8.attn.c_attn.weight_1" -> "1910 DequantizeLinear_h.8.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; -"1910 DequantizeLinear_h.8.attn.c_attn.weight_1" -> "1911 Gemm_1743" [label="[768, 2304]", style=solid]; -"1911 Gemm_1743" -> "1915 Reshape_1747" [label="[]", style=solid]; -"1912 Unsqueeze_1744" -> "1914 Concat_1746" [label="[1]", style=dashed]; -"1913 Unsqueeze_1745" -> "1914 Concat_1746" [label="[1]", style=dashed]; -"1914 Concat_1746" -> "1915 Reshape_1747" [label="[3]", style=dashed]; -"1915 Reshape_1747" -> "1916 Split_1748" [label="[]", style=solid]; -"1916 Split_1748" -> "1917 QuantizeLinear_query.17_1" [label="[]", style=solid]; -"1916 Split_1748" -> "1919 Shape_1749" [label="[]", style=solid]; -"1916 Split_1748" -> "1922 Shape_1752" [label="[]", style=solid]; -"1916 Split_1748" -> "1925 Shape_1755" [label="[]", style=solid]; -"1916 Split_1748" -> "1938 Shape_1768" [label="[]", style=solid]; -"1916 Split_1748" -> "1941 Shape_1771" [label="[]", style=solid]; -"1916 Split_1748" -> "1944 Shape_1774" [label="[]", style=solid]; -"1916 Split_1748" -> "1955 Reshape_1785" [label="[]", style=solid]; -"1916 Split_1748" -> "1959 Shape_1787" [label="[]", style=solid]; -"1916 Split_1748" -> "1962 Shape_1790" [label="[]", style=solid]; -"1916 Split_1748" -> "1965 Shape_1793" [label="[]", style=solid]; -"1916 Split_1748" -> "1976 Reshape_1804" [label="[]", style=solid]; -"1917 QuantizeLinear_query.17_1" -> "1918 DequantizeLinear_query.17_1" [label="[]", style=dashed]; -"1918 DequantizeLinear_query.17_1" -> "1936 Reshape_1766" [label="[]", style=solid]; -"1919 Shape_1749" -> "1921 Gather_1751" [label="[-1]", style=dashed]; -"1920 Constant_1750" -> "1921 Gather_1751" [label="[]", style=dashed]; -"1921 Gather_1751" -> "1932 Unsqueeze_1762" [label="[]", style=dashed]; -"1922 Shape_1752" -> "1924 Gather_1754" [label="[-1]", style=dashed]; -"1923 Constant_1753" -> "1924 Gather_1754" [label="[]", style=dashed]; -"1924 Gather_1754" -> "1933 Unsqueeze_1763" [label="[]", style=dashed]; -"1925 Shape_1755" -> "1927 Gather_1757" [label="[-1]", style=dashed]; -"1926 Constant_1756" -> "1927 Gather_1757" [label="[]", style=dashed]; -"1927 Gather_1757" -> "1929 Div_1759" [label="[]", style=dashed]; -"1928 Constant_1758" -> "1929 Div_1759" [label="[]", style=dashed]; -"1929 Div_1759" -> "1930 Cast_1760" [label="[]", style=dashed]; -"1930 Cast_1760" -> "1931 Cast_1761" [label="[]", style=dashed]; -"1931 Cast_1761" -> "1934 Unsqueeze_1764" [label="[]", style=dashed]; -"1932 Unsqueeze_1762" -> "1935 Concat_1765" [label="[1]", style=dashed]; -"1933 Unsqueeze_1763" -> "1935 Concat_1765" [label="[1]", style=dashed]; -"1934 Unsqueeze_1764" -> "1935 Concat_1765" [label="[1]", style=dashed]; -"1935 Concat_1765" -> "1936 Reshape_1766" [label="[4]", style=dashed]; -"1936 Reshape_1766" -> "1937 Transpose_1767" [label="[]", style=solid]; -"1937 Transpose_1767" -> "1982 MatMul_1810" [label="[]", style=solid]; -"1938 Shape_1768" -> "1940 Gather_1770" [label="[-1]", style=dashed]; -"1939 Constant_1769" -> "1940 Gather_1770" 
[label="[]", style=dashed]; -"1940 Gather_1770" -> "1951 Unsqueeze_1781" [label="[]", style=dashed]; -"1941 Shape_1771" -> "1943 Gather_1773" [label="[-1]", style=dashed]; -"1942 Constant_1772" -> "1943 Gather_1773" [label="[]", style=dashed]; -"1943 Gather_1773" -> "1952 Unsqueeze_1782" [label="[]", style=dashed]; -"1944 Shape_1774" -> "1946 Gather_1776" [label="[-1]", style=dashed]; -"1945 Constant_1775" -> "1946 Gather_1776" [label="[]", style=dashed]; -"1946 Gather_1776" -> "1948 Div_1778" [label="[]", style=dashed]; -"1947 Constant_1777" -> "1948 Div_1778" [label="[]", style=dashed]; -"1948 Div_1778" -> "1949 Cast_1779" [label="[]", style=dashed]; -"1949 Cast_1779" -> "1950 Cast_1780" [label="[]", style=dashed]; -"1950 Cast_1780" -> "1953 Unsqueeze_1783" [label="[]", style=dashed]; -"1951 Unsqueeze_1781" -> "1954 Concat_1784" [label="[1]", style=dashed]; -"1952 Unsqueeze_1782" -> "1954 Concat_1784" [label="[1]", style=dashed]; -"1953 Unsqueeze_1783" -> "1954 Concat_1784" [label="[1]", style=dashed]; -"1954 Concat_1784" -> "1955 Reshape_1785" [label="[4]", style=dashed]; -"1955 Reshape_1785" -> "1956 QuantizeLinear_2204_1" [label="[]", style=solid]; -"1955 Reshape_1785" -> "1978 Transpose_1806" [label="[]", style=solid]; -"1956 QuantizeLinear_2204_1" -> "1957 DequantizeLinear_2204_1" [label="[]", style=dashed]; -"1957 DequantizeLinear_2204_1" -> "1958 Transpose_1786" [label="[]", style=solid]; -"1958 Transpose_1786" -> "1982 MatMul_1810" [label="[]", style=solid]; -"1959 Shape_1787" -> "1961 Gather_1789" [label="[-1]", style=dashed]; -"1960 Constant_1788" -> "1961 Gather_1789" [label="[]", style=dashed]; -"1961 Gather_1789" -> "1972 Unsqueeze_1800" [label="[]", style=dashed]; -"1962 Shape_1790" -> "1964 Gather_1792" [label="[-1]", style=dashed]; -"1963 Constant_1791" -> "1964 Gather_1792" [label="[]", style=dashed]; -"1964 Gather_1792" -> "1973 Unsqueeze_1801" [label="[]", style=dashed]; -"1965 Shape_1793" -> "1967 Gather_1795" [label="[-1]", style=dashed]; -"1966 Constant_1794" -> "1967 Gather_1795" [label="[]", style=dashed]; -"1967 Gather_1795" -> "1969 Div_1797" [label="[]", style=dashed]; -"1968 Constant_1796" -> "1969 Div_1797" [label="[]", style=dashed]; -"1969 Div_1797" -> "1970 Cast_1798" [label="[]", style=dashed]; -"1970 Cast_1798" -> "1971 Cast_1799" [label="[]", style=dashed]; -"1971 Cast_1799" -> "1974 Unsqueeze_1802" [label="[]", style=dashed]; -"1972 Unsqueeze_1800" -> "1975 Concat_1803" [label="[1]", style=dashed]; -"1973 Unsqueeze_1801" -> "1975 Concat_1803" [label="[1]", style=dashed]; -"1974 Unsqueeze_1802" -> "1975 Concat_1803" [label="[1]", style=dashed]; -"1975 Concat_1803" -> "1976 Reshape_1804" [label="[4]", style=dashed]; -"1976 Reshape_1804" -> "1977 Transpose_1805" [label="[]", style=solid]; -"1977 Transpose_1805" -> "1980 Unsqueeze_1808" [label="[]", style=solid]; -"1977 Transpose_1805" -> "2006 MatMul_1834" [label="[]", style=solid]; -"1978 Transpose_1806" -> "1979 Unsqueeze_1807" [label="[]", style=solid]; -"1979 Unsqueeze_1807" -> "1981 Concat_1809" [label="[]", style=solid]; -"1980 Unsqueeze_1808" -> "1981 Concat_1809" [label="[]", style=solid]; -"1981 Concat_1809" -> "2835 nncf_model_output_9" [label="[2, 1, 12, 8, 64]", style=solid]; -"1982 MatMul_1810" -> "1984 Div_1812" [label="[]", style=solid]; -"1983 Constant_1811" -> "1984 Div_1812" [label="[]", style=solid]; -"1984 Div_1812" -> "1985 Shape_1813" [label="[]", style=solid]; -"1984 Div_1812" -> "1988 Shape_1816" [label="[]", style=solid]; -"1984 Div_1812" -> "1999 Mul_1827" [label="[]", 
style=solid]; -"1985 Shape_1813" -> "1987 Gather_1815" [label="[-1]", style=dashed]; -"1986 Constant_1814" -> "1987 Gather_1815" [label="[]", style=dashed]; -"1987 Gather_1815" -> "1991 Sub_1819" [label="[]", style=dashed]; -"1988 Shape_1816" -> "1990 Gather_1818" [label="[-1]", style=dashed]; -"1989 Constant_1817" -> "1990 Gather_1818" [label="[]", style=dashed]; -"1990 Gather_1818" -> "1991 Sub_1819" [label="[]", style=dashed]; -"1990 Gather_1818" -> "1993 Unsqueeze_1821" [label="[]", style=dashed]; -"1990 Gather_1818" -> "1996 Unsqueeze_1824" [label="[]", style=dashed]; -"1991 Sub_1819" -> "1992 Unsqueeze_1820" [label="[]", style=dashed]; -"1992 Unsqueeze_1820" -> "1995 Slice_1823" [label="[1]", style=dashed]; -"1993 Unsqueeze_1821" -> "1995 Slice_1823" [label="[1]", style=dashed]; -"1994 Constant_1822" -> "1995 Slice_1823" [label="[1]", style=dashed]; -"1995 Slice_1823" -> "1998 Slice_1826" [label="[]", style=solid]; -"1996 Unsqueeze_1824" -> "1998 Slice_1826" [label="[1]", style=dashed]; -"1997 Constant_1825" -> "1998 Slice_1826" [label="[1]", style=dashed]; -"1998 Slice_1826" -> "1999 Mul_1827" [label="[]", style=solid]; -"1998 Slice_1826" -> "2001 Sub_1829" [label="[]", style=solid]; -"1999 Mul_1827" -> "2004 Sub_1832" [label="[]", style=solid]; -"2000 Constant_1828" -> "2001 Sub_1829" [label="[]", style=solid]; -"2001 Sub_1829" -> "2003 Mul_1831" [label="[]", style=solid]; -"2002 Constant_1830" -> "2003 Mul_1831" [label="[]", style=solid]; -"2003 Mul_1831" -> "2004 Sub_1832" [label="[]", style=solid]; -"2004 Sub_1832" -> "2005 Softmax_1833" [label="[]", style=solid]; -"2005 Softmax_1833" -> "2006 MatMul_1834" [label="[]", style=solid]; -"2006 MatMul_1834" -> "2007 QuantizeLinear_2261_1" [label="[]", style=solid]; -"2007 QuantizeLinear_2261_1" -> "2008 DequantizeLinear_2261_1" [label="[]", style=dashed]; -"2008 DequantizeLinear_2261_1" -> "2009 Transpose_1835" [label="[]", style=solid]; -"2009 Transpose_1835" -> "2010 Shape_1836" [label="[]", style=solid]; -"2009 Transpose_1835" -> "2013 Shape_1839" [label="[]", style=solid]; -"2009 Transpose_1835" -> "2016 Shape_1842" [label="[]", style=solid]; -"2009 Transpose_1835" -> "2019 Shape_1845" [label="[]", style=solid]; -"2009 Transpose_1835" -> "2027 Reshape_1853" [label="[]", style=solid]; -"2010 Shape_1836" -> "2012 Gather_1838" [label="[-1]", style=dashed]; -"2011 Constant_1837" -> "2012 Gather_1838" [label="[]", style=dashed]; -"2012 Gather_1838" -> "2023 Unsqueeze_1849" [label="[]", style=dashed]; -"2013 Shape_1839" -> "2015 Gather_1841" [label="[-1]", style=dashed]; -"2014 Constant_1840" -> "2015 Gather_1841" [label="[]", style=dashed]; -"2015 Gather_1841" -> "2024 Unsqueeze_1850" [label="[]", style=dashed]; -"2016 Shape_1842" -> "2018 Gather_1844" [label="[-1]", style=dashed]; -"2017 Constant_1843" -> "2018 Gather_1844" [label="[]", style=dashed]; -"2018 Gather_1844" -> "2022 Mul_1848" [label="[]", style=dashed]; -"2019 Shape_1845" -> "2021 Gather_1847" [label="[-1]", style=dashed]; -"2020 Constant_1846" -> "2021 Gather_1847" [label="[]", style=dashed]; -"2021 Gather_1847" -> "2022 Mul_1848" [label="[]", style=dashed]; -"2022 Mul_1848" -> "2025 Unsqueeze_1851" [label="[]", style=dashed]; -"2023 Unsqueeze_1849" -> "2026 Concat_1852" [label="[1]", style=dashed]; -"2024 Unsqueeze_1850" -> "2026 Concat_1852" [label="[1]", style=dashed]; -"2025 Unsqueeze_1851" -> "2026 Concat_1852" [label="[1]", style=dashed]; -"2026 Concat_1852" -> "2027 Reshape_1853" [label="[3]", style=dashed]; -"2027 Reshape_1853" -> "2028 Shape_1854" [label="[]", 
style=solid]; -"2027 Reshape_1853" -> "2031 Shape_1857" [label="[]", style=solid]; -"2027 Reshape_1853" -> "2034 Shape_1860" [label="[]", style=solid]; -"2027 Reshape_1853" -> "2039 Reshape_1865" [label="[]", style=solid]; -"2028 Shape_1854" -> "2030 Gather_1856" [label="[-1]", style=dashed]; -"2029 Constant_1855" -> "2030 Gather_1856" [label="[]", style=dashed]; -"2030 Gather_1856" -> "2043 Unsqueeze_1867" [label="[]", style=dashed]; -"2031 Shape_1857" -> "2033 Gather_1859" [label="[-1]", style=dashed]; -"2032 Constant_1858" -> "2033 Gather_1859" [label="[]", style=dashed]; -"2033 Gather_1859" -> "2044 Unsqueeze_1868" [label="[]", style=dashed]; -"2034 Shape_1860" -> "2036 Gather_1862" [label="[-1]", style=dashed]; -"2035 Constant_1861" -> "2036 Gather_1862" [label="[]", style=dashed]; -"2036 Gather_1862" -> "2037 Unsqueeze_1863" [label="[]", style=dashed]; -"2037 Unsqueeze_1863" -> "2038 Concat_1864" [label="[1]", style=dashed]; -"2038 Concat_1864" -> "2039 Reshape_1865" [label="[2]", style=dashed]; -"2039 Reshape_1865" -> "2042 Gemm_1866" [label="[]", style=solid]; -"2040 QuantizeLinear_h.8.attn.c_proj.weight_1" -> "2041 DequantizeLinear_h.8.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; -"2041 DequantizeLinear_h.8.attn.c_proj.weight_1" -> "2042 Gemm_1866" [label="[768, 768]", style=solid]; -"2042 Gemm_1866" -> "2046 Reshape_1870" [label="[]", style=solid]; -"2043 Unsqueeze_1867" -> "2045 Concat_1869" [label="[1]", style=dashed]; -"2044 Unsqueeze_1868" -> "2045 Concat_1869" [label="[1]", style=dashed]; -"2045 Concat_1869" -> "2046 Reshape_1870" [label="[3]", style=dashed]; -"2046 Reshape_1870" -> "2047 Add_1871" [label="[]", style=solid]; -"2047 Add_1871" -> "2048 ReduceMean_1872" [label="[]", style=solid]; -"2047 Add_1871" -> "2049 Sub_1873" [label="[]", style=solid]; -"2047 Add_1871" -> "2114 Add_1930" [label="[]", style=solid]; -"2048 ReduceMean_1872" -> "2049 Sub_1873" [label="[]", style=solid]; -"2049 Sub_1873" -> "2051 Pow_1875" [label="[]", style=solid]; -"2049 Sub_1873" -> "2056 Div_1880" [label="[]", style=solid]; -"2050 Constant_1874" -> "2051 Pow_1875" [label="[]", style=solid]; -"2051 Pow_1875" -> "2052 ReduceMean_1876" [label="[]", style=solid]; -"2052 ReduceMean_1876" -> "2054 Add_1878" [label="[]", style=solid]; -"2053 Constant_1877" -> "2054 Add_1878" [label="[]", style=solid]; -"2054 Add_1878" -> "2055 Sqrt_1879" [label="[]", style=solid]; -"2055 Sqrt_1879" -> "2056 Div_1880" [label="[]", style=solid]; -"2056 Div_1880" -> "2057 Mul_1881" [label="[]", style=solid]; -"2057 Mul_1881" -> "2058 Add_1882" [label="[]", style=solid]; -"2058 Add_1882" -> "2059 QuantizeLinear_2313_1" [label="[]", style=solid]; -"2059 QuantizeLinear_2313_1" -> "2060 DequantizeLinear_2313_1" [label="[]", style=dashed]; -"2060 DequantizeLinear_2313_1" -> "2061 Shape_1883" [label="[]", style=solid]; -"2060 DequantizeLinear_2313_1" -> "2064 Shape_1886" [label="[]", style=solid]; -"2060 DequantizeLinear_2313_1" -> "2067 Shape_1889" [label="[]", style=solid]; -"2060 DequantizeLinear_2313_1" -> "2072 Reshape_1894" [label="[]", style=solid]; -"2061 Shape_1883" -> "2063 Gather_1885" [label="[-1]", style=dashed]; -"2062 Constant_1884" -> "2063 Gather_1885" [label="[]", style=dashed]; -"2063 Gather_1885" -> "2076 Unsqueeze_1896" [label="[]", style=dashed]; -"2064 Shape_1886" -> "2066 Gather_1888" [label="[-1]", style=dashed]; -"2065 Constant_1887" -> "2066 Gather_1888" [label="[]", style=dashed]; -"2066 Gather_1888" -> "2077 Unsqueeze_1897" [label="[]", style=dashed]; -"2067 Shape_1889" -> "2069 
Gather_1891" [label="[-1]", style=dashed]; -"2068 Constant_1890" -> "2069 Gather_1891" [label="[]", style=dashed]; -"2069 Gather_1891" -> "2070 Unsqueeze_1892" [label="[]", style=dashed]; -"2070 Unsqueeze_1892" -> "2071 Concat_1893" [label="[1]", style=dashed]; -"2071 Concat_1893" -> "2072 Reshape_1894" [label="[2]", style=dashed]; -"2072 Reshape_1894" -> "2075 Gemm_1895" [label="[]", style=solid]; -"2073 QuantizeLinear_h.8.mlp.c_fc.weight_1" -> "2074 DequantizeLinear_h.8.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; -"2074 DequantizeLinear_h.8.mlp.c_fc.weight_1" -> "2075 Gemm_1895" [label="[768, 3072]", style=solid]; -"2075 Gemm_1895" -> "2079 Reshape_1899" [label="[]", style=solid]; -"2076 Unsqueeze_1896" -> "2078 Concat_1898" [label="[1]", style=dashed]; -"2077 Unsqueeze_1897" -> "2078 Concat_1898" [label="[1]", style=dashed]; -"2078 Concat_1898" -> "2079 Reshape_1899" [label="[3]", style=dashed]; -"2079 Reshape_1899" -> "2081 Mul_1901" [label="[]", style=solid]; -"2079 Reshape_1899" -> "2083 Pow_1903" [label="[]", style=solid]; -"2079 Reshape_1899" -> "2086 Add_1906" [label="[]", style=solid]; -"2080 Constant_1900" -> "2081 Mul_1901" [label="[]", style=solid]; -"2081 Mul_1901" -> "2092 Mul_1912" [label="[]", style=solid]; -"2082 Constant_1902" -> "2083 Pow_1903" [label="[]", style=solid]; -"2083 Pow_1903" -> "2085 Mul_1905" [label="[]", style=solid]; -"2084 Constant_1904" -> "2085 Mul_1905" [label="[]", style=solid]; -"2085 Mul_1905" -> "2086 Add_1906" [label="[]", style=solid]; -"2086 Add_1906" -> "2088 Mul_1908" [label="[]", style=solid]; -"2087 Constant_1907" -> "2088 Mul_1908" [label="[]", style=solid]; -"2088 Mul_1908" -> "2089 Tanh_1909" [label="[]", style=solid]; -"2089 Tanh_1909" -> "2091 Add_1911" [label="[]", style=solid]; -"2090 Constant_1910" -> "2091 Add_1911" [label="[]", style=solid]; -"2091 Add_1911" -> "2092 Mul_1912" [label="[]", style=solid]; -"2092 Mul_1912" -> "2093 QuantizeLinear_2347_1" [label="[]", style=solid]; -"2093 QuantizeLinear_2347_1" -> "2094 DequantizeLinear_2347_1" [label="[]", style=dashed]; -"2094 DequantizeLinear_2347_1" -> "2095 Shape_1913" [label="[]", style=solid]; -"2094 DequantizeLinear_2347_1" -> "2098 Shape_1916" [label="[]", style=solid]; -"2094 DequantizeLinear_2347_1" -> "2101 Shape_1919" [label="[]", style=solid]; -"2094 DequantizeLinear_2347_1" -> "2106 Reshape_1924" [label="[]", style=solid]; -"2095 Shape_1913" -> "2097 Gather_1915" [label="[-1]", style=dashed]; -"2096 Constant_1914" -> "2097 Gather_1915" [label="[]", style=dashed]; -"2097 Gather_1915" -> "2110 Unsqueeze_1926" [label="[]", style=dashed]; -"2098 Shape_1916" -> "2100 Gather_1918" [label="[-1]", style=dashed]; -"2099 Constant_1917" -> "2100 Gather_1918" [label="[]", style=dashed]; -"2100 Gather_1918" -> "2111 Unsqueeze_1927" [label="[]", style=dashed]; -"2101 Shape_1919" -> "2103 Gather_1921" [label="[-1]", style=dashed]; -"2102 Constant_1920" -> "2103 Gather_1921" [label="[]", style=dashed]; -"2103 Gather_1921" -> "2104 Unsqueeze_1922" [label="[]", style=dashed]; -"2104 Unsqueeze_1922" -> "2105 Concat_1923" [label="[1]", style=dashed]; -"2105 Concat_1923" -> "2106 Reshape_1924" [label="[2]", style=dashed]; -"2106 Reshape_1924" -> "2109 Gemm_1925" [label="[]", style=solid]; -"2107 QuantizeLinear_h.8.mlp.c_proj.weight_1" -> "2108 DequantizeLinear_h.8.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; -"2108 DequantizeLinear_h.8.mlp.c_proj.weight_1" -> "2109 Gemm_1925" [label="[3072, 768]", style=solid]; -"2109 Gemm_1925" -> "2113 Reshape_1929" [label="[]", 
style=solid]; -"2110 Unsqueeze_1926" -> "2112 Concat_1928" [label="[1]", style=dashed]; -"2111 Unsqueeze_1927" -> "2112 Concat_1928" [label="[1]", style=dashed]; -"2112 Concat_1928" -> "2113 Reshape_1929" [label="[3]", style=dashed]; -"2113 Reshape_1929" -> "2114 Add_1930" [label="[]", style=solid]; -"2114 Add_1930" -> "2115 ReduceMean_1931" [label="[]", style=solid]; -"2114 Add_1930" -> "2116 Sub_1932" [label="[]", style=solid]; -"2114 Add_1930" -> "2278 Add_2082" [label="[]", style=solid]; -"2115 ReduceMean_1931" -> "2116 Sub_1932" [label="[]", style=solid]; -"2116 Sub_1932" -> "2118 Pow_1934" [label="[]", style=solid]; -"2116 Sub_1932" -> "2123 Div_1939" [label="[]", style=solid]; -"2117 Constant_1933" -> "2118 Pow_1934" [label="[]", style=solid]; -"2118 Pow_1934" -> "2119 ReduceMean_1935" [label="[]", style=solid]; -"2119 ReduceMean_1935" -> "2121 Add_1937" [label="[]", style=solid]; -"2120 Constant_1936" -> "2121 Add_1937" [label="[]", style=solid]; -"2121 Add_1937" -> "2122 Sqrt_1938" [label="[]", style=solid]; -"2122 Sqrt_1938" -> "2123 Div_1939" [label="[]", style=solid]; -"2123 Div_1939" -> "2124 Mul_1940" [label="[]", style=solid]; -"2124 Mul_1940" -> "2125 Add_1941" [label="[]", style=solid]; -"2125 Add_1941" -> "2126 QuantizeLinear_2380_1" [label="[]", style=solid]; -"2126 QuantizeLinear_2380_1" -> "2127 DequantizeLinear_2380_1" [label="[]", style=dashed]; -"2127 DequantizeLinear_2380_1" -> "2128 Shape_1942" [label="[]", style=solid]; -"2127 DequantizeLinear_2380_1" -> "2131 Shape_1945" [label="[]", style=solid]; -"2127 DequantizeLinear_2380_1" -> "2134 Shape_1948" [label="[]", style=solid]; -"2127 DequantizeLinear_2380_1" -> "2139 Reshape_1953" [label="[]", style=solid]; -"2128 Shape_1942" -> "2130 Gather_1944" [label="[-1]", style=dashed]; -"2129 Constant_1943" -> "2130 Gather_1944" [label="[]", style=dashed]; -"2130 Gather_1944" -> "2143 Unsqueeze_1955" [label="[]", style=dashed]; -"2131 Shape_1945" -> "2133 Gather_1947" [label="[-1]", style=dashed]; -"2132 Constant_1946" -> "2133 Gather_1947" [label="[]", style=dashed]; -"2133 Gather_1947" -> "2144 Unsqueeze_1956" [label="[]", style=dashed]; -"2134 Shape_1948" -> "2136 Gather_1950" [label="[-1]", style=dashed]; -"2135 Constant_1949" -> "2136 Gather_1950" [label="[]", style=dashed]; -"2136 Gather_1950" -> "2137 Unsqueeze_1951" [label="[]", style=dashed]; -"2137 Unsqueeze_1951" -> "2138 Concat_1952" [label="[1]", style=dashed]; -"2138 Concat_1952" -> "2139 Reshape_1953" [label="[2]", style=dashed]; -"2139 Reshape_1953" -> "2142 Gemm_1954" [label="[]", style=solid]; -"2140 QuantizeLinear_h.9.attn.c_attn.weight_1" -> "2141 DequantizeLinear_h.9.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; -"2141 DequantizeLinear_h.9.attn.c_attn.weight_1" -> "2142 Gemm_1954" [label="[768, 2304]", style=solid]; -"2142 Gemm_1954" -> "2146 Reshape_1958" [label="[]", style=solid]; -"2143 Unsqueeze_1955" -> "2145 Concat_1957" [label="[1]", style=dashed]; -"2144 Unsqueeze_1956" -> "2145 Concat_1957" [label="[1]", style=dashed]; -"2145 Concat_1957" -> "2146 Reshape_1958" [label="[3]", style=dashed]; -"2146 Reshape_1958" -> "2147 Split_1959" [label="[]", style=solid]; -"2147 Split_1959" -> "2148 QuantizeLinear_query.19_1" [label="[]", style=solid]; -"2147 Split_1959" -> "2150 Shape_1960" [label="[]", style=solid]; -"2147 Split_1959" -> "2153 Shape_1963" [label="[]", style=solid]; -"2147 Split_1959" -> "2156 Shape_1966" [label="[]", style=solid]; -"2147 Split_1959" -> "2169 Shape_1979" [label="[]", style=solid]; -"2147 Split_1959" -> "2172 
Shape_1982" [label="[]", style=solid]; -"2147 Split_1959" -> "2175 Shape_1985" [label="[]", style=solid]; -"2147 Split_1959" -> "2186 Reshape_1996" [label="[]", style=solid]; -"2147 Split_1959" -> "2190 Shape_1998" [label="[]", style=solid]; -"2147 Split_1959" -> "2193 Shape_2001" [label="[]", style=solid]; -"2147 Split_1959" -> "2196 Shape_2004" [label="[]", style=solid]; -"2147 Split_1959" -> "2207 Reshape_2015" [label="[]", style=solid]; -"2148 QuantizeLinear_query.19_1" -> "2149 DequantizeLinear_query.19_1" [label="[]", style=dashed]; -"2149 DequantizeLinear_query.19_1" -> "2167 Reshape_1977" [label="[]", style=solid]; -"2150 Shape_1960" -> "2152 Gather_1962" [label="[-1]", style=dashed]; -"2151 Constant_1961" -> "2152 Gather_1962" [label="[]", style=dashed]; -"2152 Gather_1962" -> "2163 Unsqueeze_1973" [label="[]", style=dashed]; -"2153 Shape_1963" -> "2155 Gather_1965" [label="[-1]", style=dashed]; -"2154 Constant_1964" -> "2155 Gather_1965" [label="[]", style=dashed]; -"2155 Gather_1965" -> "2164 Unsqueeze_1974" [label="[]", style=dashed]; -"2156 Shape_1966" -> "2158 Gather_1968" [label="[-1]", style=dashed]; -"2157 Constant_1967" -> "2158 Gather_1968" [label="[]", style=dashed]; -"2158 Gather_1968" -> "2160 Div_1970" [label="[]", style=dashed]; -"2159 Constant_1969" -> "2160 Div_1970" [label="[]", style=dashed]; -"2160 Div_1970" -> "2161 Cast_1971" [label="[]", style=dashed]; -"2161 Cast_1971" -> "2162 Cast_1972" [label="[]", style=dashed]; -"2162 Cast_1972" -> "2165 Unsqueeze_1975" [label="[]", style=dashed]; -"2163 Unsqueeze_1973" -> "2166 Concat_1976" [label="[1]", style=dashed]; -"2164 Unsqueeze_1974" -> "2166 Concat_1976" [label="[1]", style=dashed]; -"2165 Unsqueeze_1975" -> "2166 Concat_1976" [label="[1]", style=dashed]; -"2166 Concat_1976" -> "2167 Reshape_1977" [label="[4]", style=dashed]; -"2167 Reshape_1977" -> "2168 Transpose_1978" [label="[]", style=solid]; -"2168 Transpose_1978" -> "2213 MatMul_2021" [label="[]", style=solid]; -"2169 Shape_1979" -> "2171 Gather_1981" [label="[-1]", style=dashed]; -"2170 Constant_1980" -> "2171 Gather_1981" [label="[]", style=dashed]; -"2171 Gather_1981" -> "2182 Unsqueeze_1992" [label="[]", style=dashed]; -"2172 Shape_1982" -> "2174 Gather_1984" [label="[-1]", style=dashed]; -"2173 Constant_1983" -> "2174 Gather_1984" [label="[]", style=dashed]; -"2174 Gather_1984" -> "2183 Unsqueeze_1993" [label="[]", style=dashed]; -"2175 Shape_1985" -> "2177 Gather_1987" [label="[-1]", style=dashed]; -"2176 Constant_1986" -> "2177 Gather_1987" [label="[]", style=dashed]; -"2177 Gather_1987" -> "2179 Div_1989" [label="[]", style=dashed]; -"2178 Constant_1988" -> "2179 Div_1989" [label="[]", style=dashed]; -"2179 Div_1989" -> "2180 Cast_1990" [label="[]", style=dashed]; -"2180 Cast_1990" -> "2181 Cast_1991" [label="[]", style=dashed]; -"2181 Cast_1991" -> "2184 Unsqueeze_1994" [label="[]", style=dashed]; -"2182 Unsqueeze_1992" -> "2185 Concat_1995" [label="[1]", style=dashed]; -"2183 Unsqueeze_1993" -> "2185 Concat_1995" [label="[1]", style=dashed]; -"2184 Unsqueeze_1994" -> "2185 Concat_1995" [label="[1]", style=dashed]; -"2185 Concat_1995" -> "2186 Reshape_1996" [label="[4]", style=dashed]; -"2186 Reshape_1996" -> "2187 QuantizeLinear_2445_1" [label="[]", style=solid]; -"2186 Reshape_1996" -> "2209 Transpose_2017" [label="[]", style=solid]; -"2187 QuantizeLinear_2445_1" -> "2188 DequantizeLinear_2445_1" [label="[]", style=dashed]; -"2188 DequantizeLinear_2445_1" -> "2189 Transpose_1997" [label="[]", style=solid]; -"2189 Transpose_1997" -> "2213 
MatMul_2021" [label="[]", style=solid]; -"2190 Shape_1998" -> "2192 Gather_2000" [label="[-1]", style=dashed]; -"2191 Constant_1999" -> "2192 Gather_2000" [label="[]", style=dashed]; -"2192 Gather_2000" -> "2203 Unsqueeze_2011" [label="[]", style=dashed]; -"2193 Shape_2001" -> "2195 Gather_2003" [label="[-1]", style=dashed]; -"2194 Constant_2002" -> "2195 Gather_2003" [label="[]", style=dashed]; -"2195 Gather_2003" -> "2204 Unsqueeze_2012" [label="[]", style=dashed]; -"2196 Shape_2004" -> "2198 Gather_2006" [label="[-1]", style=dashed]; -"2197 Constant_2005" -> "2198 Gather_2006" [label="[]", style=dashed]; -"2198 Gather_2006" -> "2200 Div_2008" [label="[]", style=dashed]; -"2199 Constant_2007" -> "2200 Div_2008" [label="[]", style=dashed]; -"2200 Div_2008" -> "2201 Cast_2009" [label="[]", style=dashed]; -"2201 Cast_2009" -> "2202 Cast_2010" [label="[]", style=dashed]; -"2202 Cast_2010" -> "2205 Unsqueeze_2013" [label="[]", style=dashed]; -"2203 Unsqueeze_2011" -> "2206 Concat_2014" [label="[1]", style=dashed]; -"2204 Unsqueeze_2012" -> "2206 Concat_2014" [label="[1]", style=dashed]; -"2205 Unsqueeze_2013" -> "2206 Concat_2014" [label="[1]", style=dashed]; -"2206 Concat_2014" -> "2207 Reshape_2015" [label="[4]", style=dashed]; -"2207 Reshape_2015" -> "2208 Transpose_2016" [label="[]", style=solid]; -"2208 Transpose_2016" -> "2211 Unsqueeze_2019" [label="[]", style=solid]; -"2208 Transpose_2016" -> "2237 MatMul_2045" [label="[]", style=solid]; -"2209 Transpose_2017" -> "2210 Unsqueeze_2018" [label="[]", style=solid]; -"2210 Unsqueeze_2018" -> "2212 Concat_2020" [label="[]", style=solid]; -"2211 Unsqueeze_2019" -> "2212 Concat_2020" [label="[]", style=solid]; -"2212 Concat_2020" -> "2836 nncf_model_output_10" [label="[2, 1, 12, 8, 64]", style=solid]; -"2213 MatMul_2021" -> "2215 Div_2023" [label="[]", style=solid]; -"2214 Constant_2022" -> "2215 Div_2023" [label="[]", style=solid]; -"2215 Div_2023" -> "2216 Shape_2024" [label="[]", style=solid]; -"2215 Div_2023" -> "2219 Shape_2027" [label="[]", style=solid]; -"2215 Div_2023" -> "2230 Mul_2038" [label="[]", style=solid]; -"2216 Shape_2024" -> "2218 Gather_2026" [label="[-1]", style=dashed]; -"2217 Constant_2025" -> "2218 Gather_2026" [label="[]", style=dashed]; -"2218 Gather_2026" -> "2222 Sub_2030" [label="[]", style=dashed]; -"2219 Shape_2027" -> "2221 Gather_2029" [label="[-1]", style=dashed]; -"2220 Constant_2028" -> "2221 Gather_2029" [label="[]", style=dashed]; -"2221 Gather_2029" -> "2222 Sub_2030" [label="[]", style=dashed]; -"2221 Gather_2029" -> "2224 Unsqueeze_2032" [label="[]", style=dashed]; -"2221 Gather_2029" -> "2227 Unsqueeze_2035" [label="[]", style=dashed]; -"2222 Sub_2030" -> "2223 Unsqueeze_2031" [label="[]", style=dashed]; -"2223 Unsqueeze_2031" -> "2226 Slice_2034" [label="[1]", style=dashed]; -"2224 Unsqueeze_2032" -> "2226 Slice_2034" [label="[1]", style=dashed]; -"2225 Constant_2033" -> "2226 Slice_2034" [label="[1]", style=dashed]; -"2226 Slice_2034" -> "2229 Slice_2037" [label="[]", style=solid]; -"2227 Unsqueeze_2035" -> "2229 Slice_2037" [label="[1]", style=dashed]; -"2228 Constant_2036" -> "2229 Slice_2037" [label="[1]", style=dashed]; -"2229 Slice_2037" -> "2230 Mul_2038" [label="[]", style=solid]; -"2229 Slice_2037" -> "2232 Sub_2040" [label="[]", style=solid]; -"2230 Mul_2038" -> "2235 Sub_2043" [label="[]", style=solid]; -"2231 Constant_2039" -> "2232 Sub_2040" [label="[]", style=solid]; -"2232 Sub_2040" -> "2234 Mul_2042" [label="[]", style=solid]; -"2233 Constant_2041" -> "2234 Mul_2042" [label="[]", 
style=solid]; -"2234 Mul_2042" -> "2235 Sub_2043" [label="[]", style=solid]; -"2235 Sub_2043" -> "2236 Softmax_2044" [label="[]", style=solid]; -"2236 Softmax_2044" -> "2237 MatMul_2045" [label="[]", style=solid]; -"2237 MatMul_2045" -> "2238 QuantizeLinear_2502_1" [label="[]", style=solid]; -"2238 QuantizeLinear_2502_1" -> "2239 DequantizeLinear_2502_1" [label="[]", style=dashed]; -"2239 DequantizeLinear_2502_1" -> "2240 Transpose_2046" [label="[]", style=solid]; -"2240 Transpose_2046" -> "2241 Shape_2047" [label="[]", style=solid]; -"2240 Transpose_2046" -> "2244 Shape_2050" [label="[]", style=solid]; -"2240 Transpose_2046" -> "2247 Shape_2053" [label="[]", style=solid]; -"2240 Transpose_2046" -> "2250 Shape_2056" [label="[]", style=solid]; -"2240 Transpose_2046" -> "2258 Reshape_2064" [label="[]", style=solid]; -"2241 Shape_2047" -> "2243 Gather_2049" [label="[-1]", style=dashed]; -"2242 Constant_2048" -> "2243 Gather_2049" [label="[]", style=dashed]; -"2243 Gather_2049" -> "2254 Unsqueeze_2060" [label="[]", style=dashed]; -"2244 Shape_2050" -> "2246 Gather_2052" [label="[-1]", style=dashed]; -"2245 Constant_2051" -> "2246 Gather_2052" [label="[]", style=dashed]; -"2246 Gather_2052" -> "2255 Unsqueeze_2061" [label="[]", style=dashed]; -"2247 Shape_2053" -> "2249 Gather_2055" [label="[-1]", style=dashed]; -"2248 Constant_2054" -> "2249 Gather_2055" [label="[]", style=dashed]; -"2249 Gather_2055" -> "2253 Mul_2059" [label="[]", style=dashed]; -"2250 Shape_2056" -> "2252 Gather_2058" [label="[-1]", style=dashed]; -"2251 Constant_2057" -> "2252 Gather_2058" [label="[]", style=dashed]; -"2252 Gather_2058" -> "2253 Mul_2059" [label="[]", style=dashed]; -"2253 Mul_2059" -> "2256 Unsqueeze_2062" [label="[]", style=dashed]; -"2254 Unsqueeze_2060" -> "2257 Concat_2063" [label="[1]", style=dashed]; -"2255 Unsqueeze_2061" -> "2257 Concat_2063" [label="[1]", style=dashed]; -"2256 Unsqueeze_2062" -> "2257 Concat_2063" [label="[1]", style=dashed]; -"2257 Concat_2063" -> "2258 Reshape_2064" [label="[3]", style=dashed]; -"2258 Reshape_2064" -> "2259 Shape_2065" [label="[]", style=solid]; -"2258 Reshape_2064" -> "2262 Shape_2068" [label="[]", style=solid]; -"2258 Reshape_2064" -> "2265 Shape_2071" [label="[]", style=solid]; -"2258 Reshape_2064" -> "2270 Reshape_2076" [label="[]", style=solid]; -"2259 Shape_2065" -> "2261 Gather_2067" [label="[-1]", style=dashed]; -"2260 Constant_2066" -> "2261 Gather_2067" [label="[]", style=dashed]; -"2261 Gather_2067" -> "2274 Unsqueeze_2078" [label="[]", style=dashed]; -"2262 Shape_2068" -> "2264 Gather_2070" [label="[-1]", style=dashed]; -"2263 Constant_2069" -> "2264 Gather_2070" [label="[]", style=dashed]; -"2264 Gather_2070" -> "2275 Unsqueeze_2079" [label="[]", style=dashed]; -"2265 Shape_2071" -> "2267 Gather_2073" [label="[-1]", style=dashed]; -"2266 Constant_2072" -> "2267 Gather_2073" [label="[]", style=dashed]; -"2267 Gather_2073" -> "2268 Unsqueeze_2074" [label="[]", style=dashed]; -"2268 Unsqueeze_2074" -> "2269 Concat_2075" [label="[1]", style=dashed]; -"2269 Concat_2075" -> "2270 Reshape_2076" [label="[2]", style=dashed]; -"2270 Reshape_2076" -> "2273 Gemm_2077" [label="[]", style=solid]; -"2271 QuantizeLinear_h.9.attn.c_proj.weight_1" -> "2272 DequantizeLinear_h.9.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; -"2272 DequantizeLinear_h.9.attn.c_proj.weight_1" -> "2273 Gemm_2077" [label="[768, 768]", style=solid]; -"2273 Gemm_2077" -> "2277 Reshape_2081" [label="[]", style=solid]; -"2274 Unsqueeze_2078" -> "2276 Concat_2080" [label="[1]", 
style=dashed]; -"2275 Unsqueeze_2079" -> "2276 Concat_2080" [label="[1]", style=dashed]; -"2276 Concat_2080" -> "2277 Reshape_2081" [label="[3]", style=dashed]; -"2277 Reshape_2081" -> "2278 Add_2082" [label="[]", style=solid]; -"2278 Add_2082" -> "2279 ReduceMean_2083" [label="[]", style=solid]; -"2278 Add_2082" -> "2280 Sub_2084" [label="[]", style=solid]; -"2278 Add_2082" -> "2345 Add_2141" [label="[]", style=solid]; -"2279 ReduceMean_2083" -> "2280 Sub_2084" [label="[]", style=solid]; -"2280 Sub_2084" -> "2282 Pow_2086" [label="[]", style=solid]; -"2280 Sub_2084" -> "2287 Div_2091" [label="[]", style=solid]; -"2281 Constant_2085" -> "2282 Pow_2086" [label="[]", style=solid]; -"2282 Pow_2086" -> "2283 ReduceMean_2087" [label="[]", style=solid]; -"2283 ReduceMean_2087" -> "2285 Add_2089" [label="[]", style=solid]; -"2284 Constant_2088" -> "2285 Add_2089" [label="[]", style=solid]; -"2285 Add_2089" -> "2286 Sqrt_2090" [label="[]", style=solid]; -"2286 Sqrt_2090" -> "2287 Div_2091" [label="[]", style=solid]; -"2287 Div_2091" -> "2288 Mul_2092" [label="[]", style=solid]; -"2288 Mul_2092" -> "2289 Add_2093" [label="[]", style=solid]; -"2289 Add_2093" -> "2290 QuantizeLinear_2554_1" [label="[]", style=solid]; -"2290 QuantizeLinear_2554_1" -> "2291 DequantizeLinear_2554_1" [label="[]", style=dashed]; -"2291 DequantizeLinear_2554_1" -> "2292 Shape_2094" [label="[]", style=solid]; -"2291 DequantizeLinear_2554_1" -> "2295 Shape_2097" [label="[]", style=solid]; -"2291 DequantizeLinear_2554_1" -> "2298 Shape_2100" [label="[]", style=solid]; -"2291 DequantizeLinear_2554_1" -> "2303 Reshape_2105" [label="[]", style=solid]; -"2292 Shape_2094" -> "2294 Gather_2096" [label="[-1]", style=dashed]; -"2293 Constant_2095" -> "2294 Gather_2096" [label="[]", style=dashed]; -"2294 Gather_2096" -> "2307 Unsqueeze_2107" [label="[]", style=dashed]; -"2295 Shape_2097" -> "2297 Gather_2099" [label="[-1]", style=dashed]; -"2296 Constant_2098" -> "2297 Gather_2099" [label="[]", style=dashed]; -"2297 Gather_2099" -> "2308 Unsqueeze_2108" [label="[]", style=dashed]; -"2298 Shape_2100" -> "2300 Gather_2102" [label="[-1]", style=dashed]; -"2299 Constant_2101" -> "2300 Gather_2102" [label="[]", style=dashed]; -"2300 Gather_2102" -> "2301 Unsqueeze_2103" [label="[]", style=dashed]; -"2301 Unsqueeze_2103" -> "2302 Concat_2104" [label="[1]", style=dashed]; -"2302 Concat_2104" -> "2303 Reshape_2105" [label="[2]", style=dashed]; -"2303 Reshape_2105" -> "2306 Gemm_2106" [label="[]", style=solid]; -"2304 QuantizeLinear_h.9.mlp.c_fc.weight_1" -> "2305 DequantizeLinear_h.9.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; -"2305 DequantizeLinear_h.9.mlp.c_fc.weight_1" -> "2306 Gemm_2106" [label="[768, 3072]", style=solid]; -"2306 Gemm_2106" -> "2310 Reshape_2110" [label="[]", style=solid]; -"2307 Unsqueeze_2107" -> "2309 Concat_2109" [label="[1]", style=dashed]; -"2308 Unsqueeze_2108" -> "2309 Concat_2109" [label="[1]", style=dashed]; -"2309 Concat_2109" -> "2310 Reshape_2110" [label="[3]", style=dashed]; -"2310 Reshape_2110" -> "2312 Mul_2112" [label="[]", style=solid]; -"2310 Reshape_2110" -> "2314 Pow_2114" [label="[]", style=solid]; -"2310 Reshape_2110" -> "2317 Add_2117" [label="[]", style=solid]; -"2311 Constant_2111" -> "2312 Mul_2112" [label="[]", style=solid]; -"2312 Mul_2112" -> "2323 Mul_2123" [label="[]", style=solid]; -"2313 Constant_2113" -> "2314 Pow_2114" [label="[]", style=solid]; -"2314 Pow_2114" -> "2316 Mul_2116" [label="[]", style=solid]; -"2315 Constant_2115" -> "2316 Mul_2116" [label="[]", style=solid]; 
-"2316 Mul_2116" -> "2317 Add_2117" [label="[]", style=solid]; -"2317 Add_2117" -> "2319 Mul_2119" [label="[]", style=solid]; -"2318 Constant_2118" -> "2319 Mul_2119" [label="[]", style=solid]; -"2319 Mul_2119" -> "2320 Tanh_2120" [label="[]", style=solid]; -"2320 Tanh_2120" -> "2322 Add_2122" [label="[]", style=solid]; -"2321 Constant_2121" -> "2322 Add_2122" [label="[]", style=solid]; -"2322 Add_2122" -> "2323 Mul_2123" [label="[]", style=solid]; -"2323 Mul_2123" -> "2324 QuantizeLinear_2588_1" [label="[]", style=solid]; -"2324 QuantizeLinear_2588_1" -> "2325 DequantizeLinear_2588_1" [label="[]", style=dashed]; -"2325 DequantizeLinear_2588_1" -> "2326 Shape_2124" [label="[]", style=solid]; -"2325 DequantizeLinear_2588_1" -> "2329 Shape_2127" [label="[]", style=solid]; -"2325 DequantizeLinear_2588_1" -> "2332 Shape_2130" [label="[]", style=solid]; -"2325 DequantizeLinear_2588_1" -> "2337 Reshape_2135" [label="[]", style=solid]; -"2326 Shape_2124" -> "2328 Gather_2126" [label="[-1]", style=dashed]; -"2327 Constant_2125" -> "2328 Gather_2126" [label="[]", style=dashed]; -"2328 Gather_2126" -> "2341 Unsqueeze_2137" [label="[]", style=dashed]; -"2329 Shape_2127" -> "2331 Gather_2129" [label="[-1]", style=dashed]; -"2330 Constant_2128" -> "2331 Gather_2129" [label="[]", style=dashed]; -"2331 Gather_2129" -> "2342 Unsqueeze_2138" [label="[]", style=dashed]; -"2332 Shape_2130" -> "2334 Gather_2132" [label="[-1]", style=dashed]; -"2333 Constant_2131" -> "2334 Gather_2132" [label="[]", style=dashed]; -"2334 Gather_2132" -> "2335 Unsqueeze_2133" [label="[]", style=dashed]; -"2335 Unsqueeze_2133" -> "2336 Concat_2134" [label="[1]", style=dashed]; -"2336 Concat_2134" -> "2337 Reshape_2135" [label="[2]", style=dashed]; -"2337 Reshape_2135" -> "2340 Gemm_2136" [label="[]", style=solid]; -"2338 QuantizeLinear_h.9.mlp.c_proj.weight_1" -> "2339 DequantizeLinear_h.9.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; -"2339 DequantizeLinear_h.9.mlp.c_proj.weight_1" -> "2340 Gemm_2136" [label="[3072, 768]", style=solid]; -"2340 Gemm_2136" -> "2344 Reshape_2140" [label="[]", style=solid]; -"2341 Unsqueeze_2137" -> "2343 Concat_2139" [label="[1]", style=dashed]; -"2342 Unsqueeze_2138" -> "2343 Concat_2139" [label="[1]", style=dashed]; -"2343 Concat_2139" -> "2344 Reshape_2140" [label="[3]", style=dashed]; -"2344 Reshape_2140" -> "2345 Add_2141" [label="[]", style=solid]; -"2345 Add_2141" -> "2346 ReduceMean_2142" [label="[]", style=solid]; -"2345 Add_2141" -> "2347 Sub_2143" [label="[]", style=solid]; -"2345 Add_2141" -> "2509 Add_2293" [label="[]", style=solid]; -"2346 ReduceMean_2142" -> "2347 Sub_2143" [label="[]", style=solid]; -"2347 Sub_2143" -> "2349 Pow_2145" [label="[]", style=solid]; -"2347 Sub_2143" -> "2354 Div_2150" [label="[]", style=solid]; -"2348 Constant_2144" -> "2349 Pow_2145" [label="[]", style=solid]; -"2349 Pow_2145" -> "2350 ReduceMean_2146" [label="[]", style=solid]; -"2350 ReduceMean_2146" -> "2352 Add_2148" [label="[]", style=solid]; -"2351 Constant_2147" -> "2352 Add_2148" [label="[]", style=solid]; -"2352 Add_2148" -> "2353 Sqrt_2149" [label="[]", style=solid]; -"2353 Sqrt_2149" -> "2354 Div_2150" [label="[]", style=solid]; -"2354 Div_2150" -> "2355 Mul_2151" [label="[]", style=solid]; -"2355 Mul_2151" -> "2356 Add_2152" [label="[]", style=solid]; -"2356 Add_2152" -> "2357 QuantizeLinear_2621_1" [label="[]", style=solid]; -"2357 QuantizeLinear_2621_1" -> "2358 DequantizeLinear_2621_1" [label="[]", style=dashed]; -"2358 DequantizeLinear_2621_1" -> "2359 Shape_2153" 
[label="[]", style=solid]; -"2358 DequantizeLinear_2621_1" -> "2362 Shape_2156" [label="[]", style=solid]; -"2358 DequantizeLinear_2621_1" -> "2365 Shape_2159" [label="[]", style=solid]; -"2358 DequantizeLinear_2621_1" -> "2370 Reshape_2164" [label="[]", style=solid]; -"2359 Shape_2153" -> "2361 Gather_2155" [label="[-1]", style=dashed]; -"2360 Constant_2154" -> "2361 Gather_2155" [label="[]", style=dashed]; -"2361 Gather_2155" -> "2374 Unsqueeze_2166" [label="[]", style=dashed]; -"2362 Shape_2156" -> "2364 Gather_2158" [label="[-1]", style=dashed]; -"2363 Constant_2157" -> "2364 Gather_2158" [label="[]", style=dashed]; -"2364 Gather_2158" -> "2375 Unsqueeze_2167" [label="[]", style=dashed]; -"2365 Shape_2159" -> "2367 Gather_2161" [label="[-1]", style=dashed]; -"2366 Constant_2160" -> "2367 Gather_2161" [label="[]", style=dashed]; -"2367 Gather_2161" -> "2368 Unsqueeze_2162" [label="[]", style=dashed]; -"2368 Unsqueeze_2162" -> "2369 Concat_2163" [label="[1]", style=dashed]; -"2369 Concat_2163" -> "2370 Reshape_2164" [label="[2]", style=dashed]; -"2370 Reshape_2164" -> "2373 Gemm_2165" [label="[]", style=solid]; -"2371 QuantizeLinear_h.10.attn.c_attn.weight_1" -> "2372 DequantizeLinear_h.10.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; -"2372 DequantizeLinear_h.10.attn.c_attn.weight_1" -> "2373 Gemm_2165" [label="[768, 2304]", style=solid]; -"2373 Gemm_2165" -> "2377 Reshape_2169" [label="[]", style=solid]; -"2374 Unsqueeze_2166" -> "2376 Concat_2168" [label="[1]", style=dashed]; -"2375 Unsqueeze_2167" -> "2376 Concat_2168" [label="[1]", style=dashed]; -"2376 Concat_2168" -> "2377 Reshape_2169" [label="[3]", style=dashed]; -"2377 Reshape_2169" -> "2378 Split_2170" [label="[]", style=solid]; -"2378 Split_2170" -> "2379 QuantizeLinear_query.21_1" [label="[]", style=solid]; -"2378 Split_2170" -> "2381 Shape_2171" [label="[]", style=solid]; -"2378 Split_2170" -> "2384 Shape_2174" [label="[]", style=solid]; -"2378 Split_2170" -> "2387 Shape_2177" [label="[]", style=solid]; -"2378 Split_2170" -> "2400 Shape_2190" [label="[]", style=solid]; -"2378 Split_2170" -> "2403 Shape_2193" [label="[]", style=solid]; -"2378 Split_2170" -> "2406 Shape_2196" [label="[]", style=solid]; -"2378 Split_2170" -> "2417 Reshape_2207" [label="[]", style=solid]; -"2378 Split_2170" -> "2421 Shape_2209" [label="[]", style=solid]; -"2378 Split_2170" -> "2424 Shape_2212" [label="[]", style=solid]; -"2378 Split_2170" -> "2427 Shape_2215" [label="[]", style=solid]; -"2378 Split_2170" -> "2438 Reshape_2226" [label="[]", style=solid]; -"2379 QuantizeLinear_query.21_1" -> "2380 DequantizeLinear_query.21_1" [label="[]", style=dashed]; -"2380 DequantizeLinear_query.21_1" -> "2398 Reshape_2188" [label="[]", style=solid]; -"2381 Shape_2171" -> "2383 Gather_2173" [label="[-1]", style=dashed]; -"2382 Constant_2172" -> "2383 Gather_2173" [label="[]", style=dashed]; -"2383 Gather_2173" -> "2394 Unsqueeze_2184" [label="[]", style=dashed]; -"2384 Shape_2174" -> "2386 Gather_2176" [label="[-1]", style=dashed]; -"2385 Constant_2175" -> "2386 Gather_2176" [label="[]", style=dashed]; -"2386 Gather_2176" -> "2395 Unsqueeze_2185" [label="[]", style=dashed]; -"2387 Shape_2177" -> "2389 Gather_2179" [label="[-1]", style=dashed]; -"2388 Constant_2178" -> "2389 Gather_2179" [label="[]", style=dashed]; -"2389 Gather_2179" -> "2391 Div_2181" [label="[]", style=dashed]; -"2390 Constant_2180" -> "2391 Div_2181" [label="[]", style=dashed]; -"2391 Div_2181" -> "2392 Cast_2182" [label="[]", style=dashed]; -"2392 Cast_2182" -> "2393 
Cast_2183" [label="[]", style=dashed]; -"2393 Cast_2183" -> "2396 Unsqueeze_2186" [label="[]", style=dashed]; -"2394 Unsqueeze_2184" -> "2397 Concat_2187" [label="[1]", style=dashed]; -"2395 Unsqueeze_2185" -> "2397 Concat_2187" [label="[1]", style=dashed]; -"2396 Unsqueeze_2186" -> "2397 Concat_2187" [label="[1]", style=dashed]; -"2397 Concat_2187" -> "2398 Reshape_2188" [label="[4]", style=dashed]; -"2398 Reshape_2188" -> "2399 Transpose_2189" [label="[]", style=solid]; -"2399 Transpose_2189" -> "2444 MatMul_2232" [label="[]", style=solid]; -"2400 Shape_2190" -> "2402 Gather_2192" [label="[-1]", style=dashed]; -"2401 Constant_2191" -> "2402 Gather_2192" [label="[]", style=dashed]; -"2402 Gather_2192" -> "2413 Unsqueeze_2203" [label="[]", style=dashed]; -"2403 Shape_2193" -> "2405 Gather_2195" [label="[-1]", style=dashed]; -"2404 Constant_2194" -> "2405 Gather_2195" [label="[]", style=dashed]; -"2405 Gather_2195" -> "2414 Unsqueeze_2204" [label="[]", style=dashed]; -"2406 Shape_2196" -> "2408 Gather_2198" [label="[-1]", style=dashed]; -"2407 Constant_2197" -> "2408 Gather_2198" [label="[]", style=dashed]; -"2408 Gather_2198" -> "2410 Div_2200" [label="[]", style=dashed]; -"2409 Constant_2199" -> "2410 Div_2200" [label="[]", style=dashed]; -"2410 Div_2200" -> "2411 Cast_2201" [label="[]", style=dashed]; -"2411 Cast_2201" -> "2412 Cast_2202" [label="[]", style=dashed]; -"2412 Cast_2202" -> "2415 Unsqueeze_2205" [label="[]", style=dashed]; -"2413 Unsqueeze_2203" -> "2416 Concat_2206" [label="[1]", style=dashed]; -"2414 Unsqueeze_2204" -> "2416 Concat_2206" [label="[1]", style=dashed]; -"2415 Unsqueeze_2205" -> "2416 Concat_2206" [label="[1]", style=dashed]; -"2416 Concat_2206" -> "2417 Reshape_2207" [label="[4]", style=dashed]; -"2417 Reshape_2207" -> "2418 QuantizeLinear_2686_1" [label="[]", style=solid]; -"2417 Reshape_2207" -> "2440 Transpose_2228" [label="[]", style=solid]; -"2418 QuantizeLinear_2686_1" -> "2419 DequantizeLinear_2686_1" [label="[]", style=dashed]; -"2419 DequantizeLinear_2686_1" -> "2420 Transpose_2208" [label="[]", style=solid]; -"2420 Transpose_2208" -> "2444 MatMul_2232" [label="[]", style=solid]; -"2421 Shape_2209" -> "2423 Gather_2211" [label="[-1]", style=dashed]; -"2422 Constant_2210" -> "2423 Gather_2211" [label="[]", style=dashed]; -"2423 Gather_2211" -> "2434 Unsqueeze_2222" [label="[]", style=dashed]; -"2424 Shape_2212" -> "2426 Gather_2214" [label="[-1]", style=dashed]; -"2425 Constant_2213" -> "2426 Gather_2214" [label="[]", style=dashed]; -"2426 Gather_2214" -> "2435 Unsqueeze_2223" [label="[]", style=dashed]; -"2427 Shape_2215" -> "2429 Gather_2217" [label="[-1]", style=dashed]; -"2428 Constant_2216" -> "2429 Gather_2217" [label="[]", style=dashed]; -"2429 Gather_2217" -> "2431 Div_2219" [label="[]", style=dashed]; -"2430 Constant_2218" -> "2431 Div_2219" [label="[]", style=dashed]; -"2431 Div_2219" -> "2432 Cast_2220" [label="[]", style=dashed]; -"2432 Cast_2220" -> "2433 Cast_2221" [label="[]", style=dashed]; -"2433 Cast_2221" -> "2436 Unsqueeze_2224" [label="[]", style=dashed]; -"2434 Unsqueeze_2222" -> "2437 Concat_2225" [label="[1]", style=dashed]; -"2435 Unsqueeze_2223" -> "2437 Concat_2225" [label="[1]", style=dashed]; -"2436 Unsqueeze_2224" -> "2437 Concat_2225" [label="[1]", style=dashed]; -"2437 Concat_2225" -> "2438 Reshape_2226" [label="[4]", style=dashed]; -"2438 Reshape_2226" -> "2439 Transpose_2227" [label="[]", style=solid]; -"2439 Transpose_2227" -> "2442 Unsqueeze_2230" [label="[]", style=solid]; -"2439 Transpose_2227" -> "2468 
MatMul_2256" [label="[]", style=solid]; -"2440 Transpose_2228" -> "2441 Unsqueeze_2229" [label="[]", style=solid]; -"2441 Unsqueeze_2229" -> "2443 Concat_2231" [label="[]", style=solid]; -"2442 Unsqueeze_2230" -> "2443 Concat_2231" [label="[]", style=solid]; -"2443 Concat_2231" -> "2837 nncf_model_output_11" [label="[2, 1, 12, 8, 64]", style=solid]; -"2444 MatMul_2232" -> "2446 Div_2234" [label="[]", style=solid]; -"2445 Constant_2233" -> "2446 Div_2234" [label="[]", style=solid]; -"2446 Div_2234" -> "2447 Shape_2235" [label="[]", style=solid]; -"2446 Div_2234" -> "2450 Shape_2238" [label="[]", style=solid]; -"2446 Div_2234" -> "2461 Mul_2249" [label="[]", style=solid]; -"2447 Shape_2235" -> "2449 Gather_2237" [label="[-1]", style=dashed]; -"2448 Constant_2236" -> "2449 Gather_2237" [label="[]", style=dashed]; -"2449 Gather_2237" -> "2453 Sub_2241" [label="[]", style=dashed]; -"2450 Shape_2238" -> "2452 Gather_2240" [label="[-1]", style=dashed]; -"2451 Constant_2239" -> "2452 Gather_2240" [label="[]", style=dashed]; -"2452 Gather_2240" -> "2453 Sub_2241" [label="[]", style=dashed]; -"2452 Gather_2240" -> "2455 Unsqueeze_2243" [label="[]", style=dashed]; -"2452 Gather_2240" -> "2458 Unsqueeze_2246" [label="[]", style=dashed]; -"2453 Sub_2241" -> "2454 Unsqueeze_2242" [label="[]", style=dashed]; -"2454 Unsqueeze_2242" -> "2457 Slice_2245" [label="[1]", style=dashed]; -"2455 Unsqueeze_2243" -> "2457 Slice_2245" [label="[1]", style=dashed]; -"2456 Constant_2244" -> "2457 Slice_2245" [label="[1]", style=dashed]; -"2457 Slice_2245" -> "2460 Slice_2248" [label="[]", style=solid]; -"2458 Unsqueeze_2246" -> "2460 Slice_2248" [label="[1]", style=dashed]; -"2459 Constant_2247" -> "2460 Slice_2248" [label="[1]", style=dashed]; -"2460 Slice_2248" -> "2461 Mul_2249" [label="[]", style=solid]; -"2460 Slice_2248" -> "2463 Sub_2251" [label="[]", style=solid]; -"2461 Mul_2249" -> "2466 Sub_2254" [label="[]", style=solid]; -"2462 Constant_2250" -> "2463 Sub_2251" [label="[]", style=solid]; -"2463 Sub_2251" -> "2465 Mul_2253" [label="[]", style=solid]; -"2464 Constant_2252" -> "2465 Mul_2253" [label="[]", style=solid]; -"2465 Mul_2253" -> "2466 Sub_2254" [label="[]", style=solid]; -"2466 Sub_2254" -> "2467 Softmax_2255" [label="[]", style=solid]; -"2467 Softmax_2255" -> "2468 MatMul_2256" [label="[]", style=solid]; -"2468 MatMul_2256" -> "2469 QuantizeLinear_2743_1" [label="[]", style=solid]; -"2469 QuantizeLinear_2743_1" -> "2470 DequantizeLinear_2743_1" [label="[]", style=dashed]; -"2470 DequantizeLinear_2743_1" -> "2471 Transpose_2257" [label="[]", style=solid]; -"2471 Transpose_2257" -> "2472 Shape_2258" [label="[]", style=solid]; -"2471 Transpose_2257" -> "2475 Shape_2261" [label="[]", style=solid]; -"2471 Transpose_2257" -> "2478 Shape_2264" [label="[]", style=solid]; -"2471 Transpose_2257" -> "2481 Shape_2267" [label="[]", style=solid]; -"2471 Transpose_2257" -> "2489 Reshape_2275" [label="[]", style=solid]; -"2472 Shape_2258" -> "2474 Gather_2260" [label="[-1]", style=dashed]; -"2473 Constant_2259" -> "2474 Gather_2260" [label="[]", style=dashed]; -"2474 Gather_2260" -> "2485 Unsqueeze_2271" [label="[]", style=dashed]; -"2475 Shape_2261" -> "2477 Gather_2263" [label="[-1]", style=dashed]; -"2476 Constant_2262" -> "2477 Gather_2263" [label="[]", style=dashed]; -"2477 Gather_2263" -> "2486 Unsqueeze_2272" [label="[]", style=dashed]; -"2478 Shape_2264" -> "2480 Gather_2266" [label="[-1]", style=dashed]; -"2479 Constant_2265" -> "2480 Gather_2266" [label="[]", style=dashed]; -"2480 Gather_2266" -> "2484 
Mul_2270" [label="[]", style=dashed]; -"2481 Shape_2267" -> "2483 Gather_2269" [label="[-1]", style=dashed]; -"2482 Constant_2268" -> "2483 Gather_2269" [label="[]", style=dashed]; -"2483 Gather_2269" -> "2484 Mul_2270" [label="[]", style=dashed]; -"2484 Mul_2270" -> "2487 Unsqueeze_2273" [label="[]", style=dashed]; -"2485 Unsqueeze_2271" -> "2488 Concat_2274" [label="[1]", style=dashed]; -"2486 Unsqueeze_2272" -> "2488 Concat_2274" [label="[1]", style=dashed]; -"2487 Unsqueeze_2273" -> "2488 Concat_2274" [label="[1]", style=dashed]; -"2488 Concat_2274" -> "2489 Reshape_2275" [label="[3]", style=dashed]; -"2489 Reshape_2275" -> "2490 Shape_2276" [label="[]", style=solid]; -"2489 Reshape_2275" -> "2493 Shape_2279" [label="[]", style=solid]; -"2489 Reshape_2275" -> "2496 Shape_2282" [label="[]", style=solid]; -"2489 Reshape_2275" -> "2501 Reshape_2287" [label="[]", style=solid]; -"2490 Shape_2276" -> "2492 Gather_2278" [label="[-1]", style=dashed]; -"2491 Constant_2277" -> "2492 Gather_2278" [label="[]", style=dashed]; -"2492 Gather_2278" -> "2505 Unsqueeze_2289" [label="[]", style=dashed]; -"2493 Shape_2279" -> "2495 Gather_2281" [label="[-1]", style=dashed]; -"2494 Constant_2280" -> "2495 Gather_2281" [label="[]", style=dashed]; -"2495 Gather_2281" -> "2506 Unsqueeze_2290" [label="[]", style=dashed]; -"2496 Shape_2282" -> "2498 Gather_2284" [label="[-1]", style=dashed]; -"2497 Constant_2283" -> "2498 Gather_2284" [label="[]", style=dashed]; -"2498 Gather_2284" -> "2499 Unsqueeze_2285" [label="[]", style=dashed]; -"2499 Unsqueeze_2285" -> "2500 Concat_2286" [label="[1]", style=dashed]; -"2500 Concat_2286" -> "2501 Reshape_2287" [label="[2]", style=dashed]; -"2501 Reshape_2287" -> "2504 Gemm_2288" [label="[]", style=solid]; -"2502 QuantizeLinear_h.10.attn.c_proj.weight_1" -> "2503 DequantizeLinear_h.10.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; -"2503 DequantizeLinear_h.10.attn.c_proj.weight_1" -> "2504 Gemm_2288" [label="[768, 768]", style=solid]; -"2504 Gemm_2288" -> "2508 Reshape_2292" [label="[]", style=solid]; -"2505 Unsqueeze_2289" -> "2507 Concat_2291" [label="[1]", style=dashed]; -"2506 Unsqueeze_2290" -> "2507 Concat_2291" [label="[1]", style=dashed]; -"2507 Concat_2291" -> "2508 Reshape_2292" [label="[3]", style=dashed]; -"2508 Reshape_2292" -> "2509 Add_2293" [label="[]", style=solid]; -"2509 Add_2293" -> "2510 ReduceMean_2294" [label="[]", style=solid]; -"2509 Add_2293" -> "2511 Sub_2295" [label="[]", style=solid]; -"2509 Add_2293" -> "2576 Add_2352" [label="[]", style=solid]; -"2510 ReduceMean_2294" -> "2511 Sub_2295" [label="[]", style=solid]; -"2511 Sub_2295" -> "2513 Pow_2297" [label="[]", style=solid]; -"2511 Sub_2295" -> "2518 Div_2302" [label="[]", style=solid]; -"2512 Constant_2296" -> "2513 Pow_2297" [label="[]", style=solid]; -"2513 Pow_2297" -> "2514 ReduceMean_2298" [label="[]", style=solid]; -"2514 ReduceMean_2298" -> "2516 Add_2300" [label="[]", style=solid]; -"2515 Constant_2299" -> "2516 Add_2300" [label="[]", style=solid]; -"2516 Add_2300" -> "2517 Sqrt_2301" [label="[]", style=solid]; -"2517 Sqrt_2301" -> "2518 Div_2302" [label="[]", style=solid]; -"2518 Div_2302" -> "2519 Mul_2303" [label="[]", style=solid]; -"2519 Mul_2303" -> "2520 Add_2304" [label="[]", style=solid]; -"2520 Add_2304" -> "2521 QuantizeLinear_2795_1" [label="[]", style=solid]; -"2521 QuantizeLinear_2795_1" -> "2522 DequantizeLinear_2795_1" [label="[]", style=dashed]; -"2522 DequantizeLinear_2795_1" -> "2523 Shape_2305" [label="[]", style=solid]; -"2522 DequantizeLinear_2795_1" 
-> "2526 Shape_2308" [label="[]", style=solid]; -"2522 DequantizeLinear_2795_1" -> "2529 Shape_2311" [label="[]", style=solid]; -"2522 DequantizeLinear_2795_1" -> "2534 Reshape_2316" [label="[]", style=solid]; -"2523 Shape_2305" -> "2525 Gather_2307" [label="[-1]", style=dashed]; -"2524 Constant_2306" -> "2525 Gather_2307" [label="[]", style=dashed]; -"2525 Gather_2307" -> "2538 Unsqueeze_2318" [label="[]", style=dashed]; -"2526 Shape_2308" -> "2528 Gather_2310" [label="[-1]", style=dashed]; -"2527 Constant_2309" -> "2528 Gather_2310" [label="[]", style=dashed]; -"2528 Gather_2310" -> "2539 Unsqueeze_2319" [label="[]", style=dashed]; -"2529 Shape_2311" -> "2531 Gather_2313" [label="[-1]", style=dashed]; -"2530 Constant_2312" -> "2531 Gather_2313" [label="[]", style=dashed]; -"2531 Gather_2313" -> "2532 Unsqueeze_2314" [label="[]", style=dashed]; -"2532 Unsqueeze_2314" -> "2533 Concat_2315" [label="[1]", style=dashed]; -"2533 Concat_2315" -> "2534 Reshape_2316" [label="[2]", style=dashed]; -"2534 Reshape_2316" -> "2537 Gemm_2317" [label="[]", style=solid]; -"2535 QuantizeLinear_h.10.mlp.c_fc.weight_1" -> "2536 DequantizeLinear_h.10.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; -"2536 DequantizeLinear_h.10.mlp.c_fc.weight_1" -> "2537 Gemm_2317" [label="[768, 3072]", style=solid]; -"2537 Gemm_2317" -> "2541 Reshape_2321" [label="[]", style=solid]; -"2538 Unsqueeze_2318" -> "2540 Concat_2320" [label="[1]", style=dashed]; -"2539 Unsqueeze_2319" -> "2540 Concat_2320" [label="[1]", style=dashed]; -"2540 Concat_2320" -> "2541 Reshape_2321" [label="[3]", style=dashed]; -"2541 Reshape_2321" -> "2543 Mul_2323" [label="[]", style=solid]; -"2541 Reshape_2321" -> "2545 Pow_2325" [label="[]", style=solid]; -"2541 Reshape_2321" -> "2548 Add_2328" [label="[]", style=solid]; -"2542 Constant_2322" -> "2543 Mul_2323" [label="[]", style=solid]; -"2543 Mul_2323" -> "2554 Mul_2334" [label="[]", style=solid]; -"2544 Constant_2324" -> "2545 Pow_2325" [label="[]", style=solid]; -"2545 Pow_2325" -> "2547 Mul_2327" [label="[]", style=solid]; -"2546 Constant_2326" -> "2547 Mul_2327" [label="[]", style=solid]; -"2547 Mul_2327" -> "2548 Add_2328" [label="[]", style=solid]; -"2548 Add_2328" -> "2550 Mul_2330" [label="[]", style=solid]; -"2549 Constant_2329" -> "2550 Mul_2330" [label="[]", style=solid]; -"2550 Mul_2330" -> "2551 Tanh_2331" [label="[]", style=solid]; -"2551 Tanh_2331" -> "2553 Add_2333" [label="[]", style=solid]; -"2552 Constant_2332" -> "2553 Add_2333" [label="[]", style=solid]; -"2553 Add_2333" -> "2554 Mul_2334" [label="[]", style=solid]; -"2554 Mul_2334" -> "2555 QuantizeLinear_2829_1" [label="[]", style=solid]; -"2555 QuantizeLinear_2829_1" -> "2556 DequantizeLinear_2829_1" [label="[]", style=dashed]; -"2556 DequantizeLinear_2829_1" -> "2557 Shape_2335" [label="[]", style=solid]; -"2556 DequantizeLinear_2829_1" -> "2560 Shape_2338" [label="[]", style=solid]; -"2556 DequantizeLinear_2829_1" -> "2563 Shape_2341" [label="[]", style=solid]; -"2556 DequantizeLinear_2829_1" -> "2568 Reshape_2346" [label="[]", style=solid]; -"2557 Shape_2335" -> "2559 Gather_2337" [label="[-1]", style=dashed]; -"2558 Constant_2336" -> "2559 Gather_2337" [label="[]", style=dashed]; -"2559 Gather_2337" -> "2572 Unsqueeze_2348" [label="[]", style=dashed]; -"2560 Shape_2338" -> "2562 Gather_2340" [label="[-1]", style=dashed]; -"2561 Constant_2339" -> "2562 Gather_2340" [label="[]", style=dashed]; -"2562 Gather_2340" -> "2573 Unsqueeze_2349" [label="[]", style=dashed]; -"2563 Shape_2341" -> "2565 Gather_2343" 
[label="[-1]", style=dashed]; -"2564 Constant_2342" -> "2565 Gather_2343" [label="[]", style=dashed]; -"2565 Gather_2343" -> "2566 Unsqueeze_2344" [label="[]", style=dashed]; -"2566 Unsqueeze_2344" -> "2567 Concat_2345" [label="[1]", style=dashed]; -"2567 Concat_2345" -> "2568 Reshape_2346" [label="[2]", style=dashed]; -"2568 Reshape_2346" -> "2571 Gemm_2347" [label="[]", style=solid]; -"2569 QuantizeLinear_h.10.mlp.c_proj.weight_1" -> "2570 DequantizeLinear_h.10.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; -"2570 DequantizeLinear_h.10.mlp.c_proj.weight_1" -> "2571 Gemm_2347" [label="[3072, 768]", style=solid]; -"2571 Gemm_2347" -> "2575 Reshape_2351" [label="[]", style=solid]; -"2572 Unsqueeze_2348" -> "2574 Concat_2350" [label="[1]", style=dashed]; -"2573 Unsqueeze_2349" -> "2574 Concat_2350" [label="[1]", style=dashed]; -"2574 Concat_2350" -> "2575 Reshape_2351" [label="[3]", style=dashed]; -"2575 Reshape_2351" -> "2576 Add_2352" [label="[]", style=solid]; -"2576 Add_2352" -> "2577 ReduceMean_2353" [label="[]", style=solid]; -"2576 Add_2352" -> "2578 Sub_2354" [label="[]", style=solid]; -"2576 Add_2352" -> "2740 Add_2504" [label="[]", style=solid]; -"2577 ReduceMean_2353" -> "2578 Sub_2354" [label="[]", style=solid]; -"2578 Sub_2354" -> "2580 Pow_2356" [label="[]", style=solid]; -"2578 Sub_2354" -> "2585 Div_2361" [label="[]", style=solid]; -"2579 Constant_2355" -> "2580 Pow_2356" [label="[]", style=solid]; -"2580 Pow_2356" -> "2581 ReduceMean_2357" [label="[]", style=solid]; -"2581 ReduceMean_2357" -> "2583 Add_2359" [label="[]", style=solid]; -"2582 Constant_2358" -> "2583 Add_2359" [label="[]", style=solid]; -"2583 Add_2359" -> "2584 Sqrt_2360" [label="[]", style=solid]; -"2584 Sqrt_2360" -> "2585 Div_2361" [label="[]", style=solid]; -"2585 Div_2361" -> "2586 Mul_2362" [label="[]", style=solid]; -"2586 Mul_2362" -> "2587 Add_2363" [label="[]", style=solid]; -"2587 Add_2363" -> "2588 QuantizeLinear_2862_1" [label="[]", style=solid]; -"2588 QuantizeLinear_2862_1" -> "2589 DequantizeLinear_2862_1" [label="[]", style=dashed]; -"2589 DequantizeLinear_2862_1" -> "2590 Shape_2364" [label="[]", style=solid]; -"2589 DequantizeLinear_2862_1" -> "2593 Shape_2367" [label="[]", style=solid]; -"2589 DequantizeLinear_2862_1" -> "2596 Shape_2370" [label="[]", style=solid]; -"2589 DequantizeLinear_2862_1" -> "2601 Reshape_2375" [label="[]", style=solid]; -"2590 Shape_2364" -> "2592 Gather_2366" [label="[-1]", style=dashed]; -"2591 Constant_2365" -> "2592 Gather_2366" [label="[]", style=dashed]; -"2592 Gather_2366" -> "2605 Unsqueeze_2377" [label="[]", style=dashed]; -"2593 Shape_2367" -> "2595 Gather_2369" [label="[-1]", style=dashed]; -"2594 Constant_2368" -> "2595 Gather_2369" [label="[]", style=dashed]; -"2595 Gather_2369" -> "2606 Unsqueeze_2378" [label="[]", style=dashed]; -"2596 Shape_2370" -> "2598 Gather_2372" [label="[-1]", style=dashed]; -"2597 Constant_2371" -> "2598 Gather_2372" [label="[]", style=dashed]; -"2598 Gather_2372" -> "2599 Unsqueeze_2373" [label="[]", style=dashed]; -"2599 Unsqueeze_2373" -> "2600 Concat_2374" [label="[1]", style=dashed]; -"2600 Concat_2374" -> "2601 Reshape_2375" [label="[2]", style=dashed]; -"2601 Reshape_2375" -> "2604 Gemm_2376" [label="[]", style=solid]; -"2602 QuantizeLinear_h.11.attn.c_attn.weight_1" -> "2603 DequantizeLinear_h.11.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; -"2603 DequantizeLinear_h.11.attn.c_attn.weight_1" -> "2604 Gemm_2376" [label="[768, 2304]", style=solid]; -"2604 Gemm_2376" -> "2608 Reshape_2380" 
[label="[]", style=solid]; -"2605 Unsqueeze_2377" -> "2607 Concat_2379" [label="[1]", style=dashed]; -"2606 Unsqueeze_2378" -> "2607 Concat_2379" [label="[1]", style=dashed]; -"2607 Concat_2379" -> "2608 Reshape_2380" [label="[3]", style=dashed]; -"2608 Reshape_2380" -> "2609 Split_2381" [label="[]", style=solid]; -"2609 Split_2381" -> "2610 QuantizeLinear_query.23_1" [label="[]", style=solid]; -"2609 Split_2381" -> "2612 Shape_2382" [label="[]", style=solid]; -"2609 Split_2381" -> "2615 Shape_2385" [label="[]", style=solid]; -"2609 Split_2381" -> "2618 Shape_2388" [label="[]", style=solid]; -"2609 Split_2381" -> "2631 Shape_2401" [label="[]", style=solid]; -"2609 Split_2381" -> "2634 Shape_2404" [label="[]", style=solid]; -"2609 Split_2381" -> "2637 Shape_2407" [label="[]", style=solid]; -"2609 Split_2381" -> "2648 Reshape_2418" [label="[]", style=solid]; -"2609 Split_2381" -> "2652 Shape_2420" [label="[]", style=solid]; -"2609 Split_2381" -> "2655 Shape_2423" [label="[]", style=solid]; -"2609 Split_2381" -> "2658 Shape_2426" [label="[]", style=solid]; -"2609 Split_2381" -> "2669 Reshape_2437" [label="[]", style=solid]; -"2610 QuantizeLinear_query.23_1" -> "2611 DequantizeLinear_query.23_1" [label="[]", style=dashed]; -"2611 DequantizeLinear_query.23_1" -> "2629 Reshape_2399" [label="[]", style=solid]; -"2612 Shape_2382" -> "2614 Gather_2384" [label="[-1]", style=dashed]; -"2613 Constant_2383" -> "2614 Gather_2384" [label="[]", style=dashed]; -"2614 Gather_2384" -> "2625 Unsqueeze_2395" [label="[]", style=dashed]; -"2615 Shape_2385" -> "2617 Gather_2387" [label="[-1]", style=dashed]; -"2616 Constant_2386" -> "2617 Gather_2387" [label="[]", style=dashed]; -"2617 Gather_2387" -> "2626 Unsqueeze_2396" [label="[]", style=dashed]; -"2618 Shape_2388" -> "2620 Gather_2390" [label="[-1]", style=dashed]; -"2619 Constant_2389" -> "2620 Gather_2390" [label="[]", style=dashed]; -"2620 Gather_2390" -> "2622 Div_2392" [label="[]", style=dashed]; -"2621 Constant_2391" -> "2622 Div_2392" [label="[]", style=dashed]; -"2622 Div_2392" -> "2623 Cast_2393" [label="[]", style=dashed]; -"2623 Cast_2393" -> "2624 Cast_2394" [label="[]", style=dashed]; -"2624 Cast_2394" -> "2627 Unsqueeze_2397" [label="[]", style=dashed]; -"2625 Unsqueeze_2395" -> "2628 Concat_2398" [label="[1]", style=dashed]; -"2626 Unsqueeze_2396" -> "2628 Concat_2398" [label="[1]", style=dashed]; -"2627 Unsqueeze_2397" -> "2628 Concat_2398" [label="[1]", style=dashed]; -"2628 Concat_2398" -> "2629 Reshape_2399" [label="[4]", style=dashed]; -"2629 Reshape_2399" -> "2630 Transpose_2400" [label="[]", style=solid]; -"2630 Transpose_2400" -> "2675 MatMul_2443" [label="[]", style=solid]; -"2631 Shape_2401" -> "2633 Gather_2403" [label="[-1]", style=dashed]; -"2632 Constant_2402" -> "2633 Gather_2403" [label="[]", style=dashed]; -"2633 Gather_2403" -> "2644 Unsqueeze_2414" [label="[]", style=dashed]; -"2634 Shape_2404" -> "2636 Gather_2406" [label="[-1]", style=dashed]; -"2635 Constant_2405" -> "2636 Gather_2406" [label="[]", style=dashed]; -"2636 Gather_2406" -> "2645 Unsqueeze_2415" [label="[]", style=dashed]; -"2637 Shape_2407" -> "2639 Gather_2409" [label="[-1]", style=dashed]; -"2638 Constant_2408" -> "2639 Gather_2409" [label="[]", style=dashed]; -"2639 Gather_2409" -> "2641 Div_2411" [label="[]", style=dashed]; -"2640 Constant_2410" -> "2641 Div_2411" [label="[]", style=dashed]; -"2641 Div_2411" -> "2642 Cast_2412" [label="[]", style=dashed]; -"2642 Cast_2412" -> "2643 Cast_2413" [label="[]", style=dashed]; -"2643 Cast_2413" -> "2646 
Unsqueeze_2416" [label="[]", style=dashed]; -"2644 Unsqueeze_2414" -> "2647 Concat_2417" [label="[1]", style=dashed]; -"2645 Unsqueeze_2415" -> "2647 Concat_2417" [label="[1]", style=dashed]; -"2646 Unsqueeze_2416" -> "2647 Concat_2417" [label="[1]", style=dashed]; -"2647 Concat_2417" -> "2648 Reshape_2418" [label="[4]", style=dashed]; -"2648 Reshape_2418" -> "2649 QuantizeLinear_2927_1" [label="[]", style=solid]; -"2648 Reshape_2418" -> "2671 Transpose_2439" [label="[]", style=solid]; -"2649 QuantizeLinear_2927_1" -> "2650 DequantizeLinear_2927_1" [label="[]", style=dashed]; -"2650 DequantizeLinear_2927_1" -> "2651 Transpose_2419" [label="[]", style=solid]; -"2651 Transpose_2419" -> "2675 MatMul_2443" [label="[]", style=solid]; -"2652 Shape_2420" -> "2654 Gather_2422" [label="[-1]", style=dashed]; -"2653 Constant_2421" -> "2654 Gather_2422" [label="[]", style=dashed]; -"2654 Gather_2422" -> "2665 Unsqueeze_2433" [label="[]", style=dashed]; -"2655 Shape_2423" -> "2657 Gather_2425" [label="[-1]", style=dashed]; -"2656 Constant_2424" -> "2657 Gather_2425" [label="[]", style=dashed]; -"2657 Gather_2425" -> "2666 Unsqueeze_2434" [label="[]", style=dashed]; -"2658 Shape_2426" -> "2660 Gather_2428" [label="[-1]", style=dashed]; -"2659 Constant_2427" -> "2660 Gather_2428" [label="[]", style=dashed]; -"2660 Gather_2428" -> "2662 Div_2430" [label="[]", style=dashed]; -"2661 Constant_2429" -> "2662 Div_2430" [label="[]", style=dashed]; -"2662 Div_2430" -> "2663 Cast_2431" [label="[]", style=dashed]; -"2663 Cast_2431" -> "2664 Cast_2432" [label="[]", style=dashed]; -"2664 Cast_2432" -> "2667 Unsqueeze_2435" [label="[]", style=dashed]; -"2665 Unsqueeze_2433" -> "2668 Concat_2436" [label="[1]", style=dashed]; -"2666 Unsqueeze_2434" -> "2668 Concat_2436" [label="[1]", style=dashed]; -"2667 Unsqueeze_2435" -> "2668 Concat_2436" [label="[1]", style=dashed]; -"2668 Concat_2436" -> "2669 Reshape_2437" [label="[4]", style=dashed]; -"2669 Reshape_2437" -> "2670 Transpose_2438" [label="[]", style=solid]; -"2670 Transpose_2438" -> "2673 Unsqueeze_2441" [label="[]", style=solid]; -"2670 Transpose_2438" -> "2699 MatMul_2467" [label="[]", style=solid]; -"2671 Transpose_2439" -> "2672 Unsqueeze_2440" [label="[]", style=solid]; -"2672 Unsqueeze_2440" -> "2674 Concat_2442" [label="[]", style=solid]; -"2673 Unsqueeze_2441" -> "2674 Concat_2442" [label="[]", style=solid]; -"2674 Concat_2442" -> "2838 nncf_model_output_12" [label="[2, 1, 12, 8, 64]", style=solid]; -"2675 MatMul_2443" -> "2677 Div_2445" [label="[]", style=solid]; -"2676 Constant_2444" -> "2677 Div_2445" [label="[]", style=solid]; -"2677 Div_2445" -> "2678 Shape_2446" [label="[]", style=solid]; -"2677 Div_2445" -> "2681 Shape_2449" [label="[]", style=solid]; -"2677 Div_2445" -> "2692 Mul_2460" [label="[]", style=solid]; -"2678 Shape_2446" -> "2680 Gather_2448" [label="[-1]", style=dashed]; -"2679 Constant_2447" -> "2680 Gather_2448" [label="[]", style=dashed]; -"2680 Gather_2448" -> "2684 Sub_2452" [label="[]", style=dashed]; -"2681 Shape_2449" -> "2683 Gather_2451" [label="[-1]", style=dashed]; -"2682 Constant_2450" -> "2683 Gather_2451" [label="[]", style=dashed]; -"2683 Gather_2451" -> "2684 Sub_2452" [label="[]", style=dashed]; -"2683 Gather_2451" -> "2686 Unsqueeze_2454" [label="[]", style=dashed]; -"2683 Gather_2451" -> "2689 Unsqueeze_2457" [label="[]", style=dashed]; -"2684 Sub_2452" -> "2685 Unsqueeze_2453" [label="[]", style=dashed]; -"2685 Unsqueeze_2453" -> "2688 Slice_2456" [label="[1]", style=dashed]; -"2686 Unsqueeze_2454" -> "2688 
Slice_2456" [label="[1]", style=dashed]; -"2687 Constant_2455" -> "2688 Slice_2456" [label="[1]", style=dashed]; -"2688 Slice_2456" -> "2691 Slice_2459" [label="[]", style=solid]; -"2689 Unsqueeze_2457" -> "2691 Slice_2459" [label="[1]", style=dashed]; -"2690 Constant_2458" -> "2691 Slice_2459" [label="[1]", style=dashed]; -"2691 Slice_2459" -> "2692 Mul_2460" [label="[]", style=solid]; -"2691 Slice_2459" -> "2694 Sub_2462" [label="[]", style=solid]; -"2692 Mul_2460" -> "2697 Sub_2465" [label="[]", style=solid]; -"2693 Constant_2461" -> "2694 Sub_2462" [label="[]", style=solid]; -"2694 Sub_2462" -> "2696 Mul_2464" [label="[]", style=solid]; -"2695 Constant_2463" -> "2696 Mul_2464" [label="[]", style=solid]; -"2696 Mul_2464" -> "2697 Sub_2465" [label="[]", style=solid]; -"2697 Sub_2465" -> "2698 Softmax_2466" [label="[]", style=solid]; -"2698 Softmax_2466" -> "2699 MatMul_2467" [label="[]", style=solid]; -"2699 MatMul_2467" -> "2700 QuantizeLinear_2984_1" [label="[]", style=solid]; -"2700 QuantizeLinear_2984_1" -> "2701 DequantizeLinear_2984_1" [label="[]", style=dashed]; -"2701 DequantizeLinear_2984_1" -> "2702 Transpose_2468" [label="[]", style=solid]; -"2702 Transpose_2468" -> "2703 Shape_2469" [label="[]", style=solid]; -"2702 Transpose_2468" -> "2706 Shape_2472" [label="[]", style=solid]; -"2702 Transpose_2468" -> "2709 Shape_2475" [label="[]", style=solid]; -"2702 Transpose_2468" -> "2712 Shape_2478" [label="[]", style=solid]; -"2702 Transpose_2468" -> "2720 Reshape_2486" [label="[]", style=solid]; -"2703 Shape_2469" -> "2705 Gather_2471" [label="[-1]", style=dashed]; -"2704 Constant_2470" -> "2705 Gather_2471" [label="[]", style=dashed]; -"2705 Gather_2471" -> "2716 Unsqueeze_2482" [label="[]", style=dashed]; -"2706 Shape_2472" -> "2708 Gather_2474" [label="[-1]", style=dashed]; -"2707 Constant_2473" -> "2708 Gather_2474" [label="[]", style=dashed]; -"2708 Gather_2474" -> "2717 Unsqueeze_2483" [label="[]", style=dashed]; -"2709 Shape_2475" -> "2711 Gather_2477" [label="[-1]", style=dashed]; -"2710 Constant_2476" -> "2711 Gather_2477" [label="[]", style=dashed]; -"2711 Gather_2477" -> "2715 Mul_2481" [label="[]", style=dashed]; -"2712 Shape_2478" -> "2714 Gather_2480" [label="[-1]", style=dashed]; -"2713 Constant_2479" -> "2714 Gather_2480" [label="[]", style=dashed]; -"2714 Gather_2480" -> "2715 Mul_2481" [label="[]", style=dashed]; -"2715 Mul_2481" -> "2718 Unsqueeze_2484" [label="[]", style=dashed]; -"2716 Unsqueeze_2482" -> "2719 Concat_2485" [label="[1]", style=dashed]; -"2717 Unsqueeze_2483" -> "2719 Concat_2485" [label="[1]", style=dashed]; -"2718 Unsqueeze_2484" -> "2719 Concat_2485" [label="[1]", style=dashed]; -"2719 Concat_2485" -> "2720 Reshape_2486" [label="[3]", style=dashed]; -"2720 Reshape_2486" -> "2721 Shape_2487" [label="[]", style=solid]; -"2720 Reshape_2486" -> "2724 Shape_2490" [label="[]", style=solid]; -"2720 Reshape_2486" -> "2727 Shape_2493" [label="[]", style=solid]; -"2720 Reshape_2486" -> "2732 Reshape_2498" [label="[]", style=solid]; -"2721 Shape_2487" -> "2723 Gather_2489" [label="[-1]", style=dashed]; -"2722 Constant_2488" -> "2723 Gather_2489" [label="[]", style=dashed]; -"2723 Gather_2489" -> "2736 Unsqueeze_2500" [label="[]", style=dashed]; -"2724 Shape_2490" -> "2726 Gather_2492" [label="[-1]", style=dashed]; -"2725 Constant_2491" -> "2726 Gather_2492" [label="[]", style=dashed]; -"2726 Gather_2492" -> "2737 Unsqueeze_2501" [label="[]", style=dashed]; -"2727 Shape_2493" -> "2729 Gather_2495" [label="[-1]", style=dashed]; -"2728 Constant_2494" -> 
"2729 Gather_2495" [label="[]", style=dashed]; -"2729 Gather_2495" -> "2730 Unsqueeze_2496" [label="[]", style=dashed]; -"2730 Unsqueeze_2496" -> "2731 Concat_2497" [label="[1]", style=dashed]; -"2731 Concat_2497" -> "2732 Reshape_2498" [label="[2]", style=dashed]; -"2732 Reshape_2498" -> "2735 Gemm_2499" [label="[]", style=solid]; -"2733 QuantizeLinear_h.11.attn.c_proj.weight_1" -> "2734 DequantizeLinear_h.11.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; -"2734 DequantizeLinear_h.11.attn.c_proj.weight_1" -> "2735 Gemm_2499" [label="[768, 768]", style=solid]; -"2735 Gemm_2499" -> "2739 Reshape_2503" [label="[]", style=solid]; -"2736 Unsqueeze_2500" -> "2738 Concat_2502" [label="[1]", style=dashed]; -"2737 Unsqueeze_2501" -> "2738 Concat_2502" [label="[1]", style=dashed]; -"2738 Concat_2502" -> "2739 Reshape_2503" [label="[3]", style=dashed]; -"2739 Reshape_2503" -> "2740 Add_2504" [label="[]", style=solid]; -"2740 Add_2504" -> "2741 ReduceMean_2505" [label="[]", style=solid]; -"2740 Add_2504" -> "2742 Sub_2506" [label="[]", style=solid]; -"2740 Add_2504" -> "2807 Add_2563" [label="[]", style=solid]; -"2741 ReduceMean_2505" -> "2742 Sub_2506" [label="[]", style=solid]; -"2742 Sub_2506" -> "2744 Pow_2508" [label="[]", style=solid]; -"2742 Sub_2506" -> "2749 Div_2513" [label="[]", style=solid]; -"2743 Constant_2507" -> "2744 Pow_2508" [label="[]", style=solid]; -"2744 Pow_2508" -> "2745 ReduceMean_2509" [label="[]", style=solid]; -"2745 ReduceMean_2509" -> "2747 Add_2511" [label="[]", style=solid]; -"2746 Constant_2510" -> "2747 Add_2511" [label="[]", style=solid]; -"2747 Add_2511" -> "2748 Sqrt_2512" [label="[]", style=solid]; -"2748 Sqrt_2512" -> "2749 Div_2513" [label="[]", style=solid]; -"2749 Div_2513" -> "2750 Mul_2514" [label="[]", style=solid]; -"2750 Mul_2514" -> "2751 Add_2515" [label="[]", style=solid]; -"2751 Add_2515" -> "2752 QuantizeLinear_3036_1" [label="[]", style=solid]; -"2752 QuantizeLinear_3036_1" -> "2753 DequantizeLinear_3036_1" [label="[]", style=dashed]; -"2753 DequantizeLinear_3036_1" -> "2754 Shape_2516" [label="[]", style=solid]; -"2753 DequantizeLinear_3036_1" -> "2757 Shape_2519" [label="[]", style=solid]; -"2753 DequantizeLinear_3036_1" -> "2760 Shape_2522" [label="[]", style=solid]; -"2753 DequantizeLinear_3036_1" -> "2765 Reshape_2527" [label="[]", style=solid]; -"2754 Shape_2516" -> "2756 Gather_2518" [label="[-1]", style=dashed]; -"2755 Constant_2517" -> "2756 Gather_2518" [label="[]", style=dashed]; -"2756 Gather_2518" -> "2769 Unsqueeze_2529" [label="[]", style=dashed]; -"2757 Shape_2519" -> "2759 Gather_2521" [label="[-1]", style=dashed]; -"2758 Constant_2520" -> "2759 Gather_2521" [label="[]", style=dashed]; -"2759 Gather_2521" -> "2770 Unsqueeze_2530" [label="[]", style=dashed]; -"2760 Shape_2522" -> "2762 Gather_2524" [label="[-1]", style=dashed]; -"2761 Constant_2523" -> "2762 Gather_2524" [label="[]", style=dashed]; -"2762 Gather_2524" -> "2763 Unsqueeze_2525" [label="[]", style=dashed]; -"2763 Unsqueeze_2525" -> "2764 Concat_2526" [label="[1]", style=dashed]; -"2764 Concat_2526" -> "2765 Reshape_2527" [label="[2]", style=dashed]; -"2765 Reshape_2527" -> "2768 Gemm_2528" [label="[]", style=solid]; -"2766 QuantizeLinear_h.11.mlp.c_fc.weight_1" -> "2767 DequantizeLinear_h.11.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; -"2767 DequantizeLinear_h.11.mlp.c_fc.weight_1" -> "2768 Gemm_2528" [label="[768, 3072]", style=solid]; -"2768 Gemm_2528" -> "2772 Reshape_2532" [label="[]", style=solid]; -"2769 Unsqueeze_2529" -> "2771 
Concat_2531" [label="[1]", style=dashed]; -"2770 Unsqueeze_2530" -> "2771 Concat_2531" [label="[1]", style=dashed]; -"2771 Concat_2531" -> "2772 Reshape_2532" [label="[3]", style=dashed]; -"2772 Reshape_2532" -> "2774 Mul_2534" [label="[]", style=solid]; -"2772 Reshape_2532" -> "2776 Pow_2536" [label="[]", style=solid]; -"2772 Reshape_2532" -> "2779 Add_2539" [label="[]", style=solid]; -"2773 Constant_2533" -> "2774 Mul_2534" [label="[]", style=solid]; -"2774 Mul_2534" -> "2785 Mul_2545" [label="[]", style=solid]; -"2775 Constant_2535" -> "2776 Pow_2536" [label="[]", style=solid]; -"2776 Pow_2536" -> "2778 Mul_2538" [label="[]", style=solid]; -"2777 Constant_2537" -> "2778 Mul_2538" [label="[]", style=solid]; -"2778 Mul_2538" -> "2779 Add_2539" [label="[]", style=solid]; -"2779 Add_2539" -> "2781 Mul_2541" [label="[]", style=solid]; -"2780 Constant_2540" -> "2781 Mul_2541" [label="[]", style=solid]; -"2781 Mul_2541" -> "2782 Tanh_2542" [label="[]", style=solid]; -"2782 Tanh_2542" -> "2784 Add_2544" [label="[]", style=solid]; -"2783 Constant_2543" -> "2784 Add_2544" [label="[]", style=solid]; -"2784 Add_2544" -> "2785 Mul_2545" [label="[]", style=solid]; -"2785 Mul_2545" -> "2786 QuantizeLinear_3070_1" [label="[]", style=solid]; -"2786 QuantizeLinear_3070_1" -> "2787 DequantizeLinear_3070_1" [label="[]", style=dashed]; -"2787 DequantizeLinear_3070_1" -> "2788 Shape_2546" [label="[]", style=solid]; -"2787 DequantizeLinear_3070_1" -> "2791 Shape_2549" [label="[]", style=solid]; -"2787 DequantizeLinear_3070_1" -> "2794 Shape_2552" [label="[]", style=solid]; -"2787 DequantizeLinear_3070_1" -> "2799 Reshape_2557" [label="[]", style=solid]; -"2788 Shape_2546" -> "2790 Gather_2548" [label="[-1]", style=dashed]; -"2789 Constant_2547" -> "2790 Gather_2548" [label="[]", style=dashed]; -"2790 Gather_2548" -> "2803 Unsqueeze_2559" [label="[]", style=dashed]; -"2791 Shape_2549" -> "2793 Gather_2551" [label="[-1]", style=dashed]; -"2792 Constant_2550" -> "2793 Gather_2551" [label="[]", style=dashed]; -"2793 Gather_2551" -> "2804 Unsqueeze_2560" [label="[]", style=dashed]; -"2794 Shape_2552" -> "2796 Gather_2554" [label="[-1]", style=dashed]; -"2795 Constant_2553" -> "2796 Gather_2554" [label="[]", style=dashed]; -"2796 Gather_2554" -> "2797 Unsqueeze_2555" [label="[]", style=dashed]; -"2797 Unsqueeze_2555" -> "2798 Concat_2556" [label="[1]", style=dashed]; -"2798 Concat_2556" -> "2799 Reshape_2557" [label="[2]", style=dashed]; -"2799 Reshape_2557" -> "2802 Gemm_2558" [label="[]", style=solid]; -"2800 QuantizeLinear_h.11.mlp.c_proj.weight_1" -> "2801 DequantizeLinear_h.11.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; -"2801 DequantizeLinear_h.11.mlp.c_proj.weight_1" -> "2802 Gemm_2558" [label="[3072, 768]", style=solid]; -"2802 Gemm_2558" -> "2806 Reshape_2562" [label="[]", style=solid]; -"2803 Unsqueeze_2559" -> "2805 Concat_2561" [label="[1]", style=dashed]; -"2804 Unsqueeze_2560" -> "2805 Concat_2561" [label="[1]", style=dashed]; -"2805 Concat_2561" -> "2806 Reshape_2562" [label="[3]", style=dashed]; -"2806 Reshape_2562" -> "2807 Add_2563" [label="[]", style=solid]; -"2807 Add_2563" -> "2808 ReduceMean_2564" [label="[]", style=solid]; -"2807 Add_2563" -> "2809 Sub_2565" [label="[]", style=solid]; -"2808 ReduceMean_2564" -> "2809 Sub_2565" [label="[]", style=solid]; -"2809 Sub_2565" -> "2811 Pow_2567" [label="[]", style=solid]; -"2809 Sub_2565" -> "2816 Div_2572" [label="[]", style=solid]; -"2810 Constant_2566" -> "2811 Pow_2567" [label="[]", style=solid]; -"2811 Pow_2567" -> "2812 
ReduceMean_2568" [label="[]", style=solid]; -"2812 ReduceMean_2568" -> "2814 Add_2570" [label="[]", style=solid]; -"2813 Constant_2569" -> "2814 Add_2570" [label="[]", style=solid]; -"2814 Add_2570" -> "2815 Sqrt_2571" [label="[]", style=solid]; -"2815 Sqrt_2571" -> "2816 Div_2572" [label="[]", style=solid]; -"2816 Div_2572" -> "2817 Mul_2573" [label="[]", style=solid]; -"2817 Mul_2573" -> "2818 Add_2574" [label="[]", style=solid]; -"2818 Add_2574" -> "2824 Reshape_2580" [label="[]", style=solid]; -"2819 Unsqueeze_2575" -> "2823 Concat_2579" [label="[1]", style=dashed]; -"2820 Unsqueeze_2576" -> "2823 Concat_2579" [label="[1]", style=dashed]; -"2821 Unsqueeze_2577" -> "2823 Concat_2579" [label="[1]", style=dashed]; -"2822 Unsqueeze_2578" -> "2823 Concat_2579" [label="[1]", style=dashed]; -"2823 Concat_2579" -> "2824 Reshape_2580" [label="[4]", style=dashed]; -"2824 Reshape_2580" -> "2826 nncf_model_output_0" [label="[1, 1, 8, 768]", style=solid]; -"2825 nncf_model_input_0" -> "0 Shape_0" [label="[-1, -1, -1]", style=dashed]; -"2825 nncf_model_input_0" -> "3 Shape_3" [label="[-1, -1, -1]", style=dashed]; -"2825 nncf_model_input_0" -> "6 Shape_6" [label="[-1, -1, -1]", style=dashed]; -"2825 nncf_model_input_0" -> "11 Reshape_11" [label="[-1, -1, -1]", style=dashed]; +"8 Gather_8" -> "10 Unsqueeze_9" [label="[]", style=dashed]; +"8 Gather_8" -> "14 Unsqueeze_12" [label="[]", style=dashed]; +"8 Gather_8" -> "28 Unsqueeze_23" [label="[]", style=dashed]; +"8 Gather_8" -> "3225 Unsqueeze_2577" [label="[]", style=dashed]; +"9 Constant_nncf_9" -> "10 Unsqueeze_9" [label="[1]", style=dashed]; +"10 Unsqueeze_9" -> "11 Concat_10" [label="[1]", style=dashed]; +"11 Concat_10" -> "12 Reshape_11" [label="[2]", style=dashed]; +"12 Reshape_11" -> "33 Gather_26" [label="[]", style=dashed]; +"13 Constant_nncf_13" -> "14 Unsqueeze_12" [label="[1]", style=dashed]; +"14 Unsqueeze_12" -> "15 Sub_13" [label="[1]", style=dashed]; +"15 Sub_13" -> "16 Div_14" [label="[1]", style=dashed]; +"16 Div_14" -> "17 ConstantOfShape_15" [label="[1]", style=dashed]; +"17 ConstantOfShape_15" -> "18 NonZero_16" [label="[-1]", style=dashed]; +"18 NonZero_16" -> "19 Transpose_17" [label="[1, -1]", style=dashed]; +"19 Transpose_17" -> "21 Squeeze_18" [label="[-1, 1]", style=dashed]; +"20 Constant_nncf_20" -> "21 Squeeze_18" [label="[1]", style=dashed]; +"21 Squeeze_18" -> "22 Mul_19" [label="[-1]", style=dashed]; +"22 Mul_19" -> "23 Add_20" [label="[-1]", style=dashed]; +"23 Add_20" -> "24 Cast_21" [label="[-1]", style=dashed]; +"24 Cast_21" -> "26 Unsqueeze_22" [label="[-1]", style=dashed]; +"25 Constant_nncf_25" -> "26 Unsqueeze_22" [label="[1]", style=dashed]; +"26 Unsqueeze_22" -> "30 Reshape_25" [label="[1, -1]", style=dashed]; +"27 Constant_nncf_27" -> "28 Unsqueeze_23" [label="[1]", style=dashed]; +"28 Unsqueeze_23" -> "29 Concat_24" [label="[1]", style=dashed]; +"29 Concat_24" -> "30 Reshape_25" [label="[2]", style=dashed]; +"30 Reshape_25" -> "36 Gather_27" [label="[]", style=dashed]; +"31 QuantizeLinear_wte.weight_1" -> "32 DequantizeLinear_wte.weight_1" [label="[50257, 768]", style=dashed]; +"32 DequantizeLinear_wte.weight_1" -> "33 Gather_26" [label="[50257, 768]", style=solid]; +"33 Gather_26" -> "37 Add_28" [label="[]", style=solid]; +"34 QuantizeLinear_wpe.weight_1" -> "35 DequantizeLinear_wpe.weight_1" [label="[1024, 768]", style=dashed]; +"35 DequantizeLinear_wpe.weight_1" -> "36 Gather_27" [label="[1024, 768]", style=solid]; +"36 Gather_27" -> "37 Add_28" [label="[]", style=solid]; +"37 Add_28" -> "38 Shape_29" 
[label="[]", style=solid]; +"37 Add_28" -> "41 ReduceMean_32" [label="[]", style=solid]; +"37 Add_28" -> "42 Sub_33" [label="[]", style=solid]; +"37 Add_28" -> "231 Add_183" [label="[]", style=solid]; +"38 Shape_29" -> "40 Gather_31" [label="[-1]", style=dashed]; +"39 Constant_30" -> "40 Gather_31" [label="[]", style=dashed]; +"40 Gather_31" -> "3227 Unsqueeze_2578" [label="[]", style=dashed]; +"41 ReduceMean_32" -> "42 Sub_33" [label="[]", style=solid]; +"42 Sub_33" -> "44 Pow_35" [label="[]", style=solid]; +"42 Sub_33" -> "49 Div_40" [label="[]", style=solid]; +"43 Constant_34" -> "44 Pow_35" [label="[]", style=solid]; +"44 Pow_35" -> "45 ReduceMean_36" [label="[]", style=solid]; +"45 ReduceMean_36" -> "47 Add_38" [label="[]", style=solid]; +"46 Constant_37" -> "47 Add_38" [label="[]", style=solid]; +"47 Add_38" -> "48 Sqrt_39" [label="[]", style=solid]; +"48 Sqrt_39" -> "49 Div_40" [label="[]", style=solid]; +"49 Div_40" -> "50 Mul_41" [label="[]", style=solid]; +"50 Mul_41" -> "51 Add_42" [label="[]", style=solid]; +"51 Add_42" -> "52 QuantizeLinear_211_1" [label="[]", style=solid]; +"52 QuantizeLinear_211_1" -> "53 DequantizeLinear_211_1" [label="[]", style=dashed]; +"53 DequantizeLinear_211_1" -> "54 Shape_43" [label="[]", style=solid]; +"53 DequantizeLinear_211_1" -> "57 Shape_46" [label="[]", style=solid]; +"53 DequantizeLinear_211_1" -> "60 Shape_49" [label="[]", style=solid]; +"53 DequantizeLinear_211_1" -> "66 Reshape_54" [label="[]", style=solid]; +"54 Shape_43" -> "56 Gather_45" [label="[-1]", style=dashed]; +"55 Constant_44" -> "56 Gather_45" [label="[]", style=dashed]; +"56 Gather_45" -> "71 Unsqueeze_56" [label="[]", style=dashed]; +"57 Shape_46" -> "59 Gather_48" [label="[-1]", style=dashed]; +"58 Constant_47" -> "59 Gather_48" [label="[]", style=dashed]; +"59 Gather_48" -> "73 Unsqueeze_57" [label="[]", style=dashed]; +"60 Shape_49" -> "62 Gather_51" [label="[-1]", style=dashed]; +"61 Constant_50" -> "62 Gather_51" [label="[]", style=dashed]; +"62 Gather_51" -> "64 Unsqueeze_52" [label="[]", style=dashed]; +"63 Constant_nncf_57" -> "64 Unsqueeze_52" [label="[1]", style=dashed]; +"64 Unsqueeze_52" -> "65 Concat_53" [label="[1]", style=dashed]; +"65 Concat_53" -> "66 Reshape_54" [label="[2]", style=dashed]; +"66 Reshape_54" -> "69 Gemm_55" [label="[]", style=solid]; +"67 QuantizeLinear_h.0.attn.c_attn.weight_1" -> "68 DequantizeLinear_h.0.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"68 DequantizeLinear_h.0.attn.c_attn.weight_1" -> "69 Gemm_55" [label="[768, 2304]", style=solid]; +"69 Gemm_55" -> "75 Reshape_59" [label="[]", style=solid]; +"70 Constant_nncf_62" -> "71 Unsqueeze_56" [label="[1]", style=dashed]; +"71 Unsqueeze_56" -> "74 Concat_58" [label="[1]", style=dashed]; +"72 Constant_nncf_64" -> "73 Unsqueeze_57" [label="[1]", style=dashed]; +"73 Unsqueeze_57" -> "74 Concat_58" [label="[1]", style=dashed]; +"74 Concat_58" -> "75 Reshape_59" [label="[3]", style=dashed]; +"75 Reshape_59" -> "77 Split_60" [label="[]", style=solid]; +"76 Constant_nncf_68" -> "77 Split_60" [label="[3]", style=dashed]; +"77 Split_60" -> "78 QuantizeLinear_query.1_1" [label="[]", style=solid]; +"77 Split_60" -> "80 Shape_61" [label="[]", style=solid]; +"77 Split_60" -> "83 Shape_64" [label="[]", style=solid]; +"77 Split_60" -> "86 Shape_67" [label="[]", style=solid]; +"77 Split_60" -> "102 Shape_80" [label="[]", style=solid]; +"77 Split_60" -> "105 Shape_83" [label="[]", style=solid]; +"77 Split_60" -> "108 Shape_86" [label="[]", style=solid]; +"77 Split_60" -> "122 Reshape_97" 
[label="[]", style=solid]; +"77 Split_60" -> "126 Shape_99" [label="[]", style=solid]; +"77 Split_60" -> "129 Shape_102" [label="[]", style=solid]; +"77 Split_60" -> "132 Shape_105" [label="[]", style=solid]; +"77 Split_60" -> "146 Reshape_116" [label="[]", style=solid]; +"78 QuantizeLinear_query.1_1" -> "79 DequantizeLinear_query.1_1" [label="[]", style=dashed]; +"79 DequantizeLinear_query.1_1" -> "100 Reshape_78" [label="[]", style=solid]; +"80 Shape_61" -> "82 Gather_63" [label="[-1]", style=dashed]; +"81 Constant_62" -> "82 Gather_63" [label="[]", style=dashed]; +"82 Gather_63" -> "94 Unsqueeze_74" [label="[]", style=dashed]; +"83 Shape_64" -> "85 Gather_66" [label="[-1]", style=dashed]; +"84 Constant_65" -> "85 Gather_66" [label="[]", style=dashed]; +"85 Gather_66" -> "96 Unsqueeze_75" [label="[]", style=dashed]; +"86 Shape_67" -> "88 Gather_69" [label="[-1]", style=dashed]; +"87 Constant_68" -> "88 Gather_69" [label="[]", style=dashed]; +"88 Gather_69" -> "90 Div_71" [label="[]", style=dashed]; +"89 Constant_70" -> "90 Div_71" [label="[]", style=dashed]; +"90 Div_71" -> "91 Cast_72" [label="[]", style=dashed]; +"91 Cast_72" -> "92 Cast_73" [label="[]", style=dashed]; +"92 Cast_73" -> "98 Unsqueeze_76" [label="[]", style=dashed]; +"93 Constant_nncf_83" -> "94 Unsqueeze_74" [label="[1]", style=dashed]; +"94 Unsqueeze_74" -> "99 Concat_77" [label="[1]", style=dashed]; +"95 Constant_nncf_85" -> "96 Unsqueeze_75" [label="[1]", style=dashed]; +"96 Unsqueeze_75" -> "99 Concat_77" [label="[1]", style=dashed]; +"97 Constant_nncf_87" -> "98 Unsqueeze_76" [label="[1]", style=dashed]; +"98 Unsqueeze_76" -> "99 Concat_77" [label="[1]", style=dashed]; +"99 Concat_77" -> "100 Reshape_78" [label="[4]", style=dashed]; +"100 Reshape_78" -> "101 Transpose_79" [label="[]", style=solid]; +"101 Transpose_79" -> "154 MatMul_122" [label="[]", style=solid]; +"102 Shape_80" -> "104 Gather_82" [label="[-1]", style=dashed]; +"103 Constant_81" -> "104 Gather_82" [label="[]", style=dashed]; +"104 Gather_82" -> "116 Unsqueeze_93" [label="[]", style=dashed]; +"105 Shape_83" -> "107 Gather_85" [label="[-1]", style=dashed]; +"106 Constant_84" -> "107 Gather_85" [label="[]", style=dashed]; +"107 Gather_85" -> "118 Unsqueeze_94" [label="[]", style=dashed]; +"108 Shape_86" -> "110 Gather_88" [label="[-1]", style=dashed]; +"109 Constant_87" -> "110 Gather_88" [label="[]", style=dashed]; +"110 Gather_88" -> "112 Div_90" [label="[]", style=dashed]; +"111 Constant_89" -> "112 Div_90" [label="[]", style=dashed]; +"112 Div_90" -> "113 Cast_91" [label="[]", style=dashed]; +"113 Cast_91" -> "114 Cast_92" [label="[]", style=dashed]; +"114 Cast_92" -> "120 Unsqueeze_95" [label="[]", style=dashed]; +"115 Constant_nncf_105" -> "116 Unsqueeze_93" [label="[1]", style=dashed]; +"116 Unsqueeze_93" -> "121 Concat_96" [label="[1]", style=dashed]; +"117 Constant_nncf_107" -> "118 Unsqueeze_94" [label="[1]", style=dashed]; +"118 Unsqueeze_94" -> "121 Concat_96" [label="[1]", style=dashed]; +"119 Constant_nncf_109" -> "120 Unsqueeze_95" [label="[1]", style=dashed]; +"120 Unsqueeze_95" -> "121 Concat_96" [label="[1]", style=dashed]; +"121 Concat_96" -> "122 Reshape_97" [label="[4]", style=dashed]; +"122 Reshape_97" -> "123 QuantizeLinear_276_1" [label="[]", style=solid]; +"122 Reshape_97" -> "148 Transpose_118" [label="[]", style=solid]; +"123 QuantizeLinear_276_1" -> "124 DequantizeLinear_276_1" [label="[]", style=dashed]; +"124 DequantizeLinear_276_1" -> "125 Transpose_98" [label="[]", style=solid]; +"125 Transpose_98" -> "154 MatMul_122" 
[label="[]", style=solid]; +"126 Shape_99" -> "128 Gather_101" [label="[-1]", style=dashed]; +"127 Constant_100" -> "128 Gather_101" [label="[]", style=dashed]; +"128 Gather_101" -> "140 Unsqueeze_112" [label="[]", style=dashed]; +"129 Shape_102" -> "131 Gather_104" [label="[-1]", style=dashed]; +"130 Constant_103" -> "131 Gather_104" [label="[]", style=dashed]; +"131 Gather_104" -> "142 Unsqueeze_113" [label="[]", style=dashed]; +"132 Shape_105" -> "134 Gather_107" [label="[-1]", style=dashed]; +"133 Constant_106" -> "134 Gather_107" [label="[]", style=dashed]; +"134 Gather_107" -> "136 Div_109" [label="[]", style=dashed]; +"135 Constant_108" -> "136 Div_109" [label="[]", style=dashed]; +"136 Div_109" -> "137 Cast_110" [label="[]", style=dashed]; +"137 Cast_110" -> "138 Cast_111" [label="[]", style=dashed]; +"138 Cast_111" -> "144 Unsqueeze_114" [label="[]", style=dashed]; +"139 Constant_nncf_127" -> "140 Unsqueeze_112" [label="[1]", style=dashed]; +"140 Unsqueeze_112" -> "145 Concat_115" [label="[1]", style=dashed]; +"141 Constant_nncf_129" -> "142 Unsqueeze_113" [label="[1]", style=dashed]; +"142 Unsqueeze_113" -> "145 Concat_115" [label="[1]", style=dashed]; +"143 Constant_nncf_131" -> "144 Unsqueeze_114" [label="[1]", style=dashed]; +"144 Unsqueeze_114" -> "145 Concat_115" [label="[1]", style=dashed]; +"145 Concat_115" -> "146 Reshape_116" [label="[4]", style=dashed]; +"146 Reshape_116" -> "147 Transpose_117" [label="[]", style=solid]; +"147 Transpose_117" -> "152 Unsqueeze_120" [label="[]", style=solid]; +"147 Transpose_117" -> "184 MatMul_146" [label="[]", style=solid]; +"148 Transpose_118" -> "150 Unsqueeze_119" [label="[]", style=solid]; +"149 Constant_nncf_137" -> "150 Unsqueeze_119" [label="[1]", style=dashed]; +"150 Unsqueeze_119" -> "153 Concat_121" [label="[]", style=solid]; +"151 Constant_nncf_139" -> "152 Unsqueeze_120" [label="[1]", style=dashed]; +"152 Unsqueeze_120" -> "153 Concat_121" [label="[]", style=solid]; +"153 Concat_121" -> "3232 nncf_model_output_1" [label="[2, 1, 12, 8, 64]", style=solid]; +"154 MatMul_122" -> "156 Div_124" [label="[]", style=solid]; +"155 Constant_123" -> "156 Div_124" [label="[]", style=solid]; +"156 Div_124" -> "157 Shape_125" [label="[]", style=solid]; +"156 Div_124" -> "160 Shape_128" [label="[]", style=solid]; +"156 Div_124" -> "174 Mul_139" [label="[]", style=solid]; +"157 Shape_125" -> "159 Gather_127" [label="[-1]", style=dashed]; +"158 Constant_126" -> "159 Gather_127" [label="[]", style=dashed]; +"159 Gather_127" -> "163 Sub_131" [label="[]", style=dashed]; +"160 Shape_128" -> "162 Gather_130" [label="[-1]", style=dashed]; +"161 Constant_129" -> "162 Gather_130" [label="[]", style=dashed]; +"162 Gather_130" -> "163 Sub_131" [label="[]", style=dashed]; +"162 Gather_130" -> "167 Unsqueeze_133" [label="[]", style=dashed]; +"162 Gather_130" -> "171 Unsqueeze_136" [label="[]", style=dashed]; +"163 Sub_131" -> "165 Unsqueeze_132" [label="[]", style=dashed]; +"164 Constant_nncf_152" -> "165 Unsqueeze_132" [label="[1]", style=dashed]; +"165 Unsqueeze_132" -> "169 Slice_135" [label="[1]", style=dashed]; +"166 Constant_nncf_154" -> "167 Unsqueeze_133" [label="[1]", style=dashed]; +"167 Unsqueeze_133" -> "169 Slice_135" [label="[1]", style=dashed]; +"168 Constant_134" -> "169 Slice_135" [label="[1]", style=dashed]; +"169 Slice_135" -> "173 Slice_138" [label="[-1, -1, -1, -1]", style=solid]; +"170 Constant_nncf_158" -> "171 Unsqueeze_136" [label="[1]", style=dashed]; +"171 Unsqueeze_136" -> "173 Slice_138" [label="[1]", style=dashed]; +"172 
Constant_137" -> "173 Slice_138" [label="[1]", style=dashed]; +"173 Slice_138" -> "174 Mul_139" [label="[-1, -1, -1, -1]", style=solid]; +"173 Slice_138" -> "176 Sub_141" [label="[-1, -1, -1, -1]", style=solid]; +"174 Mul_139" -> "179 Sub_144" [label="[]", style=solid]; +"175 Constant_140" -> "176 Sub_141" [label="[]", style=solid]; +"176 Sub_141" -> "178 Mul_143" [label="[-1, -1, -1, -1]", style=solid]; +"177 Constant_142" -> "178 Mul_143" [label="[]", style=solid]; +"178 Mul_143" -> "179 Sub_144" [label="[-1, -1, -1, -1]", style=solid]; +"179 Sub_144" -> "180 Shape_nncf_168" [label="[]", style=solid]; +"179 Sub_144" -> "181 Flatten_nncf_169" [label="[]", style=solid]; +"180 Shape_nncf_168" -> "183 Reshape_nncf_171" [label="[-1]", style=dashed]; +"181 Flatten_nncf_169" -> "182 Softmax_145" [label="[]", style=solid]; +"182 Softmax_145" -> "183 Reshape_nncf_171" [label="[]", style=solid]; +"183 Reshape_nncf_171" -> "184 MatMul_146" [label="[]", style=solid]; +"184 MatMul_146" -> "185 QuantizeLinear_333_1" [label="[]", style=solid]; +"185 QuantizeLinear_333_1" -> "186 DequantizeLinear_333_1" [label="[]", style=dashed]; +"186 DequantizeLinear_333_1" -> "187 Transpose_147" [label="[]", style=solid]; +"187 Transpose_147" -> "188 Shape_148" [label="[]", style=solid]; +"187 Transpose_147" -> "191 Shape_151" [label="[]", style=solid]; +"187 Transpose_147" -> "194 Shape_154" [label="[]", style=solid]; +"187 Transpose_147" -> "197 Shape_157" [label="[]", style=solid]; +"187 Transpose_147" -> "208 Reshape_165" [label="[]", style=solid]; +"188 Shape_148" -> "190 Gather_150" [label="[-1]", style=dashed]; +"189 Constant_149" -> "190 Gather_150" [label="[]", style=dashed]; +"190 Gather_150" -> "202 Unsqueeze_161" [label="[]", style=dashed]; +"191 Shape_151" -> "193 Gather_153" [label="[-1]", style=dashed]; +"192 Constant_152" -> "193 Gather_153" [label="[]", style=dashed]; +"193 Gather_153" -> "204 Unsqueeze_162" [label="[]", style=dashed]; +"194 Shape_154" -> "196 Gather_156" [label="[-1]", style=dashed]; +"195 Constant_155" -> "196 Gather_156" [label="[]", style=dashed]; +"196 Gather_156" -> "200 Mul_160" [label="[]", style=dashed]; +"197 Shape_157" -> "199 Gather_159" [label="[-1]", style=dashed]; +"198 Constant_158" -> "199 Gather_159" [label="[]", style=dashed]; +"199 Gather_159" -> "200 Mul_160" [label="[]", style=dashed]; +"200 Mul_160" -> "206 Unsqueeze_163" [label="[]", style=dashed]; +"201 Constant_nncf_187" -> "202 Unsqueeze_161" [label="[1]", style=dashed]; +"202 Unsqueeze_161" -> "207 Concat_164" [label="[1]", style=dashed]; +"203 Constant_nncf_189" -> "204 Unsqueeze_162" [label="[1]", style=dashed]; +"204 Unsqueeze_162" -> "207 Concat_164" [label="[1]", style=dashed]; +"205 Constant_nncf_191" -> "206 Unsqueeze_163" [label="[1]", style=dashed]; +"206 Unsqueeze_163" -> "207 Concat_164" [label="[1]", style=dashed]; +"207 Concat_164" -> "208 Reshape_165" [label="[3]", style=dashed]; +"208 Reshape_165" -> "209 Shape_166" [label="[]", style=solid]; +"208 Reshape_165" -> "212 Shape_169" [label="[]", style=solid]; +"208 Reshape_165" -> "215 Shape_172" [label="[]", style=solid]; +"208 Reshape_165" -> "221 Reshape_177" [label="[]", style=solid]; +"209 Shape_166" -> "211 Gather_168" [label="[-1]", style=dashed]; +"210 Constant_167" -> "211 Gather_168" [label="[]", style=dashed]; +"211 Gather_168" -> "226 Unsqueeze_179" [label="[]", style=dashed]; +"212 Shape_169" -> "214 Gather_171" [label="[-1]", style=dashed]; +"213 Constant_170" -> "214 Gather_171" [label="[]", style=dashed]; +"214 Gather_171" -> 
"228 Unsqueeze_180" [label="[]", style=dashed]; +"215 Shape_172" -> "217 Gather_174" [label="[-1]", style=dashed]; +"216 Constant_173" -> "217 Gather_174" [label="[]", style=dashed]; +"217 Gather_174" -> "219 Unsqueeze_175" [label="[]", style=dashed]; +"218 Constant_nncf_204" -> "219 Unsqueeze_175" [label="[1]", style=dashed]; +"219 Unsqueeze_175" -> "220 Concat_176" [label="[1]", style=dashed]; +"220 Concat_176" -> "221 Reshape_177" [label="[2]", style=dashed]; +"221 Reshape_177" -> "224 Gemm_178" [label="[]", style=solid]; +"222 QuantizeLinear_h.0.attn.c_proj.weight_1" -> "223 DequantizeLinear_h.0.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"223 DequantizeLinear_h.0.attn.c_proj.weight_1" -> "224 Gemm_178" [label="[768, 768]", style=solid]; +"224 Gemm_178" -> "230 Reshape_182" [label="[]", style=solid]; +"225 Constant_nncf_209" -> "226 Unsqueeze_179" [label="[1]", style=dashed]; +"226 Unsqueeze_179" -> "229 Concat_181" [label="[1]", style=dashed]; +"227 Constant_nncf_211" -> "228 Unsqueeze_180" [label="[1]", style=dashed]; +"228 Unsqueeze_180" -> "229 Concat_181" [label="[1]", style=dashed]; +"229 Concat_181" -> "230 Reshape_182" [label="[3]", style=dashed]; +"230 Reshape_182" -> "231 Add_183" [label="[]", style=solid]; +"231 Add_183" -> "232 ReduceMean_184" [label="[]", style=solid]; +"231 Add_183" -> "233 Sub_185" [label="[]", style=solid]; +"231 Add_183" -> "304 Add_242" [label="[]", style=solid]; +"232 ReduceMean_184" -> "233 Sub_185" [label="[]", style=solid]; +"233 Sub_185" -> "235 Pow_187" [label="[]", style=solid]; +"233 Sub_185" -> "240 Div_192" [label="[]", style=solid]; +"234 Constant_186" -> "235 Pow_187" [label="[]", style=solid]; +"235 Pow_187" -> "236 ReduceMean_188" [label="[]", style=solid]; +"236 ReduceMean_188" -> "238 Add_190" [label="[]", style=solid]; +"237 Constant_189" -> "238 Add_190" [label="[]", style=solid]; +"238 Add_190" -> "239 Sqrt_191" [label="[]", style=solid]; +"239 Sqrt_191" -> "240 Div_192" [label="[]", style=solid]; +"240 Div_192" -> "241 Mul_193" [label="[]", style=solid]; +"241 Mul_193" -> "242 Add_194" [label="[]", style=solid]; +"242 Add_194" -> "243 QuantizeLinear_385_1" [label="[]", style=solid]; +"243 QuantizeLinear_385_1" -> "244 DequantizeLinear_385_1" [label="[]", style=dashed]; +"244 DequantizeLinear_385_1" -> "245 Shape_195" [label="[]", style=solid]; +"244 DequantizeLinear_385_1" -> "248 Shape_198" [label="[]", style=solid]; +"244 DequantizeLinear_385_1" -> "251 Shape_201" [label="[]", style=solid]; +"244 DequantizeLinear_385_1" -> "257 Reshape_206" [label="[]", style=solid]; +"245 Shape_195" -> "247 Gather_197" [label="[-1]", style=dashed]; +"246 Constant_196" -> "247 Gather_197" [label="[]", style=dashed]; +"247 Gather_197" -> "262 Unsqueeze_208" [label="[]", style=dashed]; +"248 Shape_198" -> "250 Gather_200" [label="[-1]", style=dashed]; +"249 Constant_199" -> "250 Gather_200" [label="[]", style=dashed]; +"250 Gather_200" -> "264 Unsqueeze_209" [label="[]", style=dashed]; +"251 Shape_201" -> "253 Gather_203" [label="[-1]", style=dashed]; +"252 Constant_202" -> "253 Gather_203" [label="[]", style=dashed]; +"253 Gather_203" -> "255 Unsqueeze_204" [label="[]", style=dashed]; +"254 Constant_nncf_236" -> "255 Unsqueeze_204" [label="[1]", style=dashed]; +"255 Unsqueeze_204" -> "256 Concat_205" [label="[1]", style=dashed]; +"256 Concat_205" -> "257 Reshape_206" [label="[2]", style=dashed]; +"257 Reshape_206" -> "260 Gemm_207" [label="[]", style=solid]; +"258 QuantizeLinear_h.0.mlp.c_fc.weight_1" -> "259 
DequantizeLinear_h.0.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"259 DequantizeLinear_h.0.mlp.c_fc.weight_1" -> "260 Gemm_207" [label="[768, 3072]", style=solid]; +"260 Gemm_207" -> "266 Reshape_211" [label="[]", style=solid]; +"261 Constant_nncf_241" -> "262 Unsqueeze_208" [label="[1]", style=dashed]; +"262 Unsqueeze_208" -> "265 Concat_210" [label="[1]", style=dashed]; +"263 Constant_nncf_243" -> "264 Unsqueeze_209" [label="[1]", style=dashed]; +"264 Unsqueeze_209" -> "265 Concat_210" [label="[1]", style=dashed]; +"265 Concat_210" -> "266 Reshape_211" [label="[3]", style=dashed]; +"266 Reshape_211" -> "268 Mul_213" [label="[]", style=solid]; +"266 Reshape_211" -> "270 Pow_215" [label="[]", style=solid]; +"266 Reshape_211" -> "273 Add_218" [label="[]", style=solid]; +"267 Constant_212" -> "268 Mul_213" [label="[]", style=solid]; +"268 Mul_213" -> "279 Mul_224" [label="[]", style=solid]; +"269 Constant_214" -> "270 Pow_215" [label="[]", style=solid]; +"270 Pow_215" -> "272 Mul_217" [label="[]", style=solid]; +"271 Constant_216" -> "272 Mul_217" [label="[]", style=solid]; +"272 Mul_217" -> "273 Add_218" [label="[]", style=solid]; +"273 Add_218" -> "275 Mul_220" [label="[]", style=solid]; +"274 Constant_219" -> "275 Mul_220" [label="[]", style=solid]; +"275 Mul_220" -> "276 Tanh_221" [label="[]", style=solid]; +"276 Tanh_221" -> "278 Add_223" [label="[]", style=solid]; +"277 Constant_222" -> "278 Add_223" [label="[]", style=solid]; +"278 Add_223" -> "279 Mul_224" [label="[]", style=solid]; +"279 Mul_224" -> "280 QuantizeLinear_419_1" [label="[]", style=solid]; +"280 QuantizeLinear_419_1" -> "281 DequantizeLinear_419_1" [label="[]", style=dashed]; +"281 DequantizeLinear_419_1" -> "282 Shape_225" [label="[]", style=solid]; +"281 DequantizeLinear_419_1" -> "285 Shape_228" [label="[]", style=solid]; +"281 DequantizeLinear_419_1" -> "288 Shape_231" [label="[]", style=solid]; +"281 DequantizeLinear_419_1" -> "294 Reshape_236" [label="[]", style=solid]; +"282 Shape_225" -> "284 Gather_227" [label="[-1]", style=dashed]; +"283 Constant_226" -> "284 Gather_227" [label="[]", style=dashed]; +"284 Gather_227" -> "299 Unsqueeze_238" [label="[]", style=dashed]; +"285 Shape_228" -> "287 Gather_230" [label="[-1]", style=dashed]; +"286 Constant_229" -> "287 Gather_230" [label="[]", style=dashed]; +"287 Gather_230" -> "301 Unsqueeze_239" [label="[]", style=dashed]; +"288 Shape_231" -> "290 Gather_233" [label="[-1]", style=dashed]; +"289 Constant_232" -> "290 Gather_233" [label="[]", style=dashed]; +"290 Gather_233" -> "292 Unsqueeze_234" [label="[]", style=dashed]; +"291 Constant_nncf_269" -> "292 Unsqueeze_234" [label="[1]", style=dashed]; +"292 Unsqueeze_234" -> "293 Concat_235" [label="[1]", style=dashed]; +"293 Concat_235" -> "294 Reshape_236" [label="[2]", style=dashed]; +"294 Reshape_236" -> "297 Gemm_237" [label="[]", style=solid]; +"295 QuantizeLinear_h.0.mlp.c_proj.weight_1" -> "296 DequantizeLinear_h.0.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"296 DequantizeLinear_h.0.mlp.c_proj.weight_1" -> "297 Gemm_237" [label="[3072, 768]", style=solid]; +"297 Gemm_237" -> "303 Reshape_241" [label="[]", style=solid]; +"298 Constant_nncf_274" -> "299 Unsqueeze_238" [label="[1]", style=dashed]; +"299 Unsqueeze_238" -> "302 Concat_240" [label="[1]", style=dashed]; +"300 Constant_nncf_276" -> "301 Unsqueeze_239" [label="[1]", style=dashed]; +"301 Unsqueeze_239" -> "302 Concat_240" [label="[1]", style=dashed]; +"302 Concat_240" -> "303 Reshape_241" [label="[3]", style=dashed]; +"303 
Reshape_241" -> "304 Add_242" [label="[]", style=solid]; +"304 Add_242" -> "305 ReduceMean_243" [label="[]", style=solid]; +"304 Add_242" -> "306 Sub_244" [label="[]", style=solid]; +"304 Add_242" -> "495 Add_394" [label="[]", style=solid]; +"305 ReduceMean_243" -> "306 Sub_244" [label="[]", style=solid]; +"306 Sub_244" -> "308 Pow_246" [label="[]", style=solid]; +"306 Sub_244" -> "313 Div_251" [label="[]", style=solid]; +"307 Constant_245" -> "308 Pow_246" [label="[]", style=solid]; +"308 Pow_246" -> "309 ReduceMean_247" [label="[]", style=solid]; +"309 ReduceMean_247" -> "311 Add_249" [label="[]", style=solid]; +"310 Constant_248" -> "311 Add_249" [label="[]", style=solid]; +"311 Add_249" -> "312 Sqrt_250" [label="[]", style=solid]; +"312 Sqrt_250" -> "313 Div_251" [label="[]", style=solid]; +"313 Div_251" -> "314 Mul_252" [label="[]", style=solid]; +"314 Mul_252" -> "315 Add_253" [label="[]", style=solid]; +"315 Add_253" -> "316 QuantizeLinear_452_1" [label="[]", style=solid]; +"316 QuantizeLinear_452_1" -> "317 DequantizeLinear_452_1" [label="[]", style=dashed]; +"317 DequantizeLinear_452_1" -> "318 Shape_254" [label="[]", style=solid]; +"317 DequantizeLinear_452_1" -> "321 Shape_257" [label="[]", style=solid]; +"317 DequantizeLinear_452_1" -> "324 Shape_260" [label="[]", style=solid]; +"317 DequantizeLinear_452_1" -> "330 Reshape_265" [label="[]", style=solid]; +"318 Shape_254" -> "320 Gather_256" [label="[-1]", style=dashed]; +"319 Constant_255" -> "320 Gather_256" [label="[]", style=dashed]; +"320 Gather_256" -> "335 Unsqueeze_267" [label="[]", style=dashed]; +"321 Shape_257" -> "323 Gather_259" [label="[-1]", style=dashed]; +"322 Constant_258" -> "323 Gather_259" [label="[]", style=dashed]; +"323 Gather_259" -> "337 Unsqueeze_268" [label="[]", style=dashed]; +"324 Shape_260" -> "326 Gather_262" [label="[-1]", style=dashed]; +"325 Constant_261" -> "326 Gather_262" [label="[]", style=dashed]; +"326 Gather_262" -> "328 Unsqueeze_263" [label="[]", style=dashed]; +"327 Constant_nncf_301" -> "328 Unsqueeze_263" [label="[1]", style=dashed]; +"328 Unsqueeze_263" -> "329 Concat_264" [label="[1]", style=dashed]; +"329 Concat_264" -> "330 Reshape_265" [label="[2]", style=dashed]; +"330 Reshape_265" -> "333 Gemm_266" [label="[]", style=solid]; +"331 QuantizeLinear_h.1.attn.c_attn.weight_1" -> "332 DequantizeLinear_h.1.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"332 DequantizeLinear_h.1.attn.c_attn.weight_1" -> "333 Gemm_266" [label="[768, 2304]", style=solid]; +"333 Gemm_266" -> "339 Reshape_270" [label="[]", style=solid]; +"334 Constant_nncf_306" -> "335 Unsqueeze_267" [label="[1]", style=dashed]; +"335 Unsqueeze_267" -> "338 Concat_269" [label="[1]", style=dashed]; +"336 Constant_nncf_308" -> "337 Unsqueeze_268" [label="[1]", style=dashed]; +"337 Unsqueeze_268" -> "338 Concat_269" [label="[1]", style=dashed]; +"338 Concat_269" -> "339 Reshape_270" [label="[3]", style=dashed]; +"339 Reshape_270" -> "341 Split_271" [label="[]", style=solid]; +"340 Constant_nncf_312" -> "341 Split_271" [label="[3]", style=dashed]; +"341 Split_271" -> "342 QuantizeLinear_query.3_1" [label="[]", style=solid]; +"341 Split_271" -> "344 Shape_272" [label="[]", style=solid]; +"341 Split_271" -> "347 Shape_275" [label="[]", style=solid]; +"341 Split_271" -> "350 Shape_278" [label="[]", style=solid]; +"341 Split_271" -> "366 Shape_291" [label="[]", style=solid]; +"341 Split_271" -> "369 Shape_294" [label="[]", style=solid]; +"341 Split_271" -> "372 Shape_297" [label="[]", style=solid]; +"341 Split_271" 
-> "386 Reshape_308" [label="[]", style=solid]; +"341 Split_271" -> "390 Shape_310" [label="[]", style=solid]; +"341 Split_271" -> "393 Shape_313" [label="[]", style=solid]; +"341 Split_271" -> "396 Shape_316" [label="[]", style=solid]; +"341 Split_271" -> "410 Reshape_327" [label="[]", style=solid]; +"342 QuantizeLinear_query.3_1" -> "343 DequantizeLinear_query.3_1" [label="[]", style=dashed]; +"343 DequantizeLinear_query.3_1" -> "364 Reshape_289" [label="[]", style=solid]; +"344 Shape_272" -> "346 Gather_274" [label="[-1]", style=dashed]; +"345 Constant_273" -> "346 Gather_274" [label="[]", style=dashed]; +"346 Gather_274" -> "358 Unsqueeze_285" [label="[]", style=dashed]; +"347 Shape_275" -> "349 Gather_277" [label="[-1]", style=dashed]; +"348 Constant_276" -> "349 Gather_277" [label="[]", style=dashed]; +"349 Gather_277" -> "360 Unsqueeze_286" [label="[]", style=dashed]; +"350 Shape_278" -> "352 Gather_280" [label="[-1]", style=dashed]; +"351 Constant_279" -> "352 Gather_280" [label="[]", style=dashed]; +"352 Gather_280" -> "354 Div_282" [label="[]", style=dashed]; +"353 Constant_281" -> "354 Div_282" [label="[]", style=dashed]; +"354 Div_282" -> "355 Cast_283" [label="[]", style=dashed]; +"355 Cast_283" -> "356 Cast_284" [label="[]", style=dashed]; +"356 Cast_284" -> "362 Unsqueeze_287" [label="[]", style=dashed]; +"357 Constant_nncf_327" -> "358 Unsqueeze_285" [label="[1]", style=dashed]; +"358 Unsqueeze_285" -> "363 Concat_288" [label="[1]", style=dashed]; +"359 Constant_nncf_329" -> "360 Unsqueeze_286" [label="[1]", style=dashed]; +"360 Unsqueeze_286" -> "363 Concat_288" [label="[1]", style=dashed]; +"361 Constant_nncf_331" -> "362 Unsqueeze_287" [label="[1]", style=dashed]; +"362 Unsqueeze_287" -> "363 Concat_288" [label="[1]", style=dashed]; +"363 Concat_288" -> "364 Reshape_289" [label="[4]", style=dashed]; +"364 Reshape_289" -> "365 Transpose_290" [label="[]", style=solid]; +"365 Transpose_290" -> "418 MatMul_333" [label="[]", style=solid]; +"366 Shape_291" -> "368 Gather_293" [label="[-1]", style=dashed]; +"367 Constant_292" -> "368 Gather_293" [label="[]", style=dashed]; +"368 Gather_293" -> "380 Unsqueeze_304" [label="[]", style=dashed]; +"369 Shape_294" -> "371 Gather_296" [label="[-1]", style=dashed]; +"370 Constant_295" -> "371 Gather_296" [label="[]", style=dashed]; +"371 Gather_296" -> "382 Unsqueeze_305" [label="[]", style=dashed]; +"372 Shape_297" -> "374 Gather_299" [label="[-1]", style=dashed]; +"373 Constant_298" -> "374 Gather_299" [label="[]", style=dashed]; +"374 Gather_299" -> "376 Div_301" [label="[]", style=dashed]; +"375 Constant_300" -> "376 Div_301" [label="[]", style=dashed]; +"376 Div_301" -> "377 Cast_302" [label="[]", style=dashed]; +"377 Cast_302" -> "378 Cast_303" [label="[]", style=dashed]; +"378 Cast_303" -> "384 Unsqueeze_306" [label="[]", style=dashed]; +"379 Constant_nncf_349" -> "380 Unsqueeze_304" [label="[1]", style=dashed]; +"380 Unsqueeze_304" -> "385 Concat_307" [label="[1]", style=dashed]; +"381 Constant_nncf_351" -> "382 Unsqueeze_305" [label="[1]", style=dashed]; +"382 Unsqueeze_305" -> "385 Concat_307" [label="[1]", style=dashed]; +"383 Constant_nncf_353" -> "384 Unsqueeze_306" [label="[1]", style=dashed]; +"384 Unsqueeze_306" -> "385 Concat_307" [label="[1]", style=dashed]; +"385 Concat_307" -> "386 Reshape_308" [label="[4]", style=dashed]; +"386 Reshape_308" -> "387 QuantizeLinear_517_1" [label="[]", style=solid]; +"386 Reshape_308" -> "412 Transpose_329" [label="[]", style=solid]; +"387 QuantizeLinear_517_1" -> "388 
DequantizeLinear_517_1" [label="[]", style=dashed]; +"388 DequantizeLinear_517_1" -> "389 Transpose_309" [label="[]", style=solid]; +"389 Transpose_309" -> "418 MatMul_333" [label="[]", style=solid]; +"390 Shape_310" -> "392 Gather_312" [label="[-1]", style=dashed]; +"391 Constant_311" -> "392 Gather_312" [label="[]", style=dashed]; +"392 Gather_312" -> "404 Unsqueeze_323" [label="[]", style=dashed]; +"393 Shape_313" -> "395 Gather_315" [label="[-1]", style=dashed]; +"394 Constant_314" -> "395 Gather_315" [label="[]", style=dashed]; +"395 Gather_315" -> "406 Unsqueeze_324" [label="[]", style=dashed]; +"396 Shape_316" -> "398 Gather_318" [label="[-1]", style=dashed]; +"397 Constant_317" -> "398 Gather_318" [label="[]", style=dashed]; +"398 Gather_318" -> "400 Div_320" [label="[]", style=dashed]; +"399 Constant_319" -> "400 Div_320" [label="[]", style=dashed]; +"400 Div_320" -> "401 Cast_321" [label="[]", style=dashed]; +"401 Cast_321" -> "402 Cast_322" [label="[]", style=dashed]; +"402 Cast_322" -> "408 Unsqueeze_325" [label="[]", style=dashed]; +"403 Constant_nncf_371" -> "404 Unsqueeze_323" [label="[1]", style=dashed]; +"404 Unsqueeze_323" -> "409 Concat_326" [label="[1]", style=dashed]; +"405 Constant_nncf_373" -> "406 Unsqueeze_324" [label="[1]", style=dashed]; +"406 Unsqueeze_324" -> "409 Concat_326" [label="[1]", style=dashed]; +"407 Constant_nncf_375" -> "408 Unsqueeze_325" [label="[1]", style=dashed]; +"408 Unsqueeze_325" -> "409 Concat_326" [label="[1]", style=dashed]; +"409 Concat_326" -> "410 Reshape_327" [label="[4]", style=dashed]; +"410 Reshape_327" -> "411 Transpose_328" [label="[]", style=solid]; +"411 Transpose_328" -> "416 Unsqueeze_331" [label="[]", style=solid]; +"411 Transpose_328" -> "448 MatMul_357" [label="[]", style=solid]; +"412 Transpose_329" -> "414 Unsqueeze_330" [label="[]", style=solid]; +"413 Constant_nncf_381" -> "414 Unsqueeze_330" [label="[1]", style=dashed]; +"414 Unsqueeze_330" -> "417 Concat_332" [label="[]", style=solid]; +"415 Constant_nncf_383" -> "416 Unsqueeze_331" [label="[1]", style=dashed]; +"416 Unsqueeze_331" -> "417 Concat_332" [label="[]", style=solid]; +"417 Concat_332" -> "3233 nncf_model_output_2" [label="[2, 1, 12, 8, 64]", style=solid]; +"418 MatMul_333" -> "420 Div_335" [label="[]", style=solid]; +"419 Constant_334" -> "420 Div_335" [label="[]", style=solid]; +"420 Div_335" -> "421 Shape_336" [label="[]", style=solid]; +"420 Div_335" -> "424 Shape_339" [label="[]", style=solid]; +"420 Div_335" -> "438 Mul_350" [label="[]", style=solid]; +"421 Shape_336" -> "423 Gather_338" [label="[-1]", style=dashed]; +"422 Constant_337" -> "423 Gather_338" [label="[]", style=dashed]; +"423 Gather_338" -> "427 Sub_342" [label="[]", style=dashed]; +"424 Shape_339" -> "426 Gather_341" [label="[-1]", style=dashed]; +"425 Constant_340" -> "426 Gather_341" [label="[]", style=dashed]; +"426 Gather_341" -> "427 Sub_342" [label="[]", style=dashed]; +"426 Gather_341" -> "431 Unsqueeze_344" [label="[]", style=dashed]; +"426 Gather_341" -> "435 Unsqueeze_347" [label="[]", style=dashed]; +"427 Sub_342" -> "429 Unsqueeze_343" [label="[]", style=dashed]; +"428 Constant_nncf_396" -> "429 Unsqueeze_343" [label="[1]", style=dashed]; +"429 Unsqueeze_343" -> "433 Slice_346" [label="[1]", style=dashed]; +"430 Constant_nncf_398" -> "431 Unsqueeze_344" [label="[1]", style=dashed]; +"431 Unsqueeze_344" -> "433 Slice_346" [label="[1]", style=dashed]; +"432 Constant_345" -> "433 Slice_346" [label="[1]", style=dashed]; +"433 Slice_346" -> "437 Slice_349" [label="[-1, -1, -1, 
-1]", style=solid]; +"434 Constant_nncf_402" -> "435 Unsqueeze_347" [label="[1]", style=dashed]; +"435 Unsqueeze_347" -> "437 Slice_349" [label="[1]", style=dashed]; +"436 Constant_348" -> "437 Slice_349" [label="[1]", style=dashed]; +"437 Slice_349" -> "438 Mul_350" [label="[-1, -1, -1, -1]", style=solid]; +"437 Slice_349" -> "440 Sub_352" [label="[-1, -1, -1, -1]", style=solid]; +"438 Mul_350" -> "443 Sub_355" [label="[]", style=solid]; +"439 Constant_351" -> "440 Sub_352" [label="[]", style=solid]; +"440 Sub_352" -> "442 Mul_354" [label="[-1, -1, -1, -1]", style=solid]; +"441 Constant_353" -> "442 Mul_354" [label="[]", style=solid]; +"442 Mul_354" -> "443 Sub_355" [label="[-1, -1, -1, -1]", style=solid]; +"443 Sub_355" -> "444 Shape_nncf_412" [label="[]", style=solid]; +"443 Sub_355" -> "445 Flatten_nncf_413" [label="[]", style=solid]; +"444 Shape_nncf_412" -> "447 Reshape_nncf_415" [label="[-1]", style=dashed]; +"445 Flatten_nncf_413" -> "446 Softmax_356" [label="[]", style=solid]; +"446 Softmax_356" -> "447 Reshape_nncf_415" [label="[]", style=solid]; +"447 Reshape_nncf_415" -> "448 MatMul_357" [label="[]", style=solid]; +"448 MatMul_357" -> "449 QuantizeLinear_574_1" [label="[]", style=solid]; +"449 QuantizeLinear_574_1" -> "450 DequantizeLinear_574_1" [label="[]", style=dashed]; +"450 DequantizeLinear_574_1" -> "451 Transpose_358" [label="[]", style=solid]; +"451 Transpose_358" -> "452 Shape_359" [label="[]", style=solid]; +"451 Transpose_358" -> "455 Shape_362" [label="[]", style=solid]; +"451 Transpose_358" -> "458 Shape_365" [label="[]", style=solid]; +"451 Transpose_358" -> "461 Shape_368" [label="[]", style=solid]; +"451 Transpose_358" -> "472 Reshape_376" [label="[]", style=solid]; +"452 Shape_359" -> "454 Gather_361" [label="[-1]", style=dashed]; +"453 Constant_360" -> "454 Gather_361" [label="[]", style=dashed]; +"454 Gather_361" -> "466 Unsqueeze_372" [label="[]", style=dashed]; +"455 Shape_362" -> "457 Gather_364" [label="[-1]", style=dashed]; +"456 Constant_363" -> "457 Gather_364" [label="[]", style=dashed]; +"457 Gather_364" -> "468 Unsqueeze_373" [label="[]", style=dashed]; +"458 Shape_365" -> "460 Gather_367" [label="[-1]", style=dashed]; +"459 Constant_366" -> "460 Gather_367" [label="[]", style=dashed]; +"460 Gather_367" -> "464 Mul_371" [label="[]", style=dashed]; +"461 Shape_368" -> "463 Gather_370" [label="[-1]", style=dashed]; +"462 Constant_369" -> "463 Gather_370" [label="[]", style=dashed]; +"463 Gather_370" -> "464 Mul_371" [label="[]", style=dashed]; +"464 Mul_371" -> "470 Unsqueeze_374" [label="[]", style=dashed]; +"465 Constant_nncf_431" -> "466 Unsqueeze_372" [label="[1]", style=dashed]; +"466 Unsqueeze_372" -> "471 Concat_375" [label="[1]", style=dashed]; +"467 Constant_nncf_433" -> "468 Unsqueeze_373" [label="[1]", style=dashed]; +"468 Unsqueeze_373" -> "471 Concat_375" [label="[1]", style=dashed]; +"469 Constant_nncf_435" -> "470 Unsqueeze_374" [label="[1]", style=dashed]; +"470 Unsqueeze_374" -> "471 Concat_375" [label="[1]", style=dashed]; +"471 Concat_375" -> "472 Reshape_376" [label="[3]", style=dashed]; +"472 Reshape_376" -> "473 Shape_377" [label="[]", style=solid]; +"472 Reshape_376" -> "476 Shape_380" [label="[]", style=solid]; +"472 Reshape_376" -> "479 Shape_383" [label="[]", style=solid]; +"472 Reshape_376" -> "485 Reshape_388" [label="[]", style=solid]; +"473 Shape_377" -> "475 Gather_379" [label="[-1]", style=dashed]; +"474 Constant_378" -> "475 Gather_379" [label="[]", style=dashed]; +"475 Gather_379" -> "490 Unsqueeze_390" [label="[]", 
style=dashed]; +"476 Shape_380" -> "478 Gather_382" [label="[-1]", style=dashed]; +"477 Constant_381" -> "478 Gather_382" [label="[]", style=dashed]; +"478 Gather_382" -> "492 Unsqueeze_391" [label="[]", style=dashed]; +"479 Shape_383" -> "481 Gather_385" [label="[-1]", style=dashed]; +"480 Constant_384" -> "481 Gather_385" [label="[]", style=dashed]; +"481 Gather_385" -> "483 Unsqueeze_386" [label="[]", style=dashed]; +"482 Constant_nncf_448" -> "483 Unsqueeze_386" [label="[1]", style=dashed]; +"483 Unsqueeze_386" -> "484 Concat_387" [label="[1]", style=dashed]; +"484 Concat_387" -> "485 Reshape_388" [label="[2]", style=dashed]; +"485 Reshape_388" -> "488 Gemm_389" [label="[]", style=solid]; +"486 QuantizeLinear_h.1.attn.c_proj.weight_1" -> "487 DequantizeLinear_h.1.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"487 DequantizeLinear_h.1.attn.c_proj.weight_1" -> "488 Gemm_389" [label="[768, 768]", style=solid]; +"488 Gemm_389" -> "494 Reshape_393" [label="[]", style=solid]; +"489 Constant_nncf_453" -> "490 Unsqueeze_390" [label="[1]", style=dashed]; +"490 Unsqueeze_390" -> "493 Concat_392" [label="[1]", style=dashed]; +"491 Constant_nncf_455" -> "492 Unsqueeze_391" [label="[1]", style=dashed]; +"492 Unsqueeze_391" -> "493 Concat_392" [label="[1]", style=dashed]; +"493 Concat_392" -> "494 Reshape_393" [label="[3]", style=dashed]; +"494 Reshape_393" -> "495 Add_394" [label="[]", style=solid]; +"495 Add_394" -> "496 ReduceMean_395" [label="[]", style=solid]; +"495 Add_394" -> "497 Sub_396" [label="[]", style=solid]; +"495 Add_394" -> "568 Add_453" [label="[]", style=solid]; +"496 ReduceMean_395" -> "497 Sub_396" [label="[]", style=solid]; +"497 Sub_396" -> "499 Pow_398" [label="[]", style=solid]; +"497 Sub_396" -> "504 Div_403" [label="[]", style=solid]; +"498 Constant_397" -> "499 Pow_398" [label="[]", style=solid]; +"499 Pow_398" -> "500 ReduceMean_399" [label="[]", style=solid]; +"500 ReduceMean_399" -> "502 Add_401" [label="[]", style=solid]; +"501 Constant_400" -> "502 Add_401" [label="[]", style=solid]; +"502 Add_401" -> "503 Sqrt_402" [label="[]", style=solid]; +"503 Sqrt_402" -> "504 Div_403" [label="[]", style=solid]; +"504 Div_403" -> "505 Mul_404" [label="[]", style=solid]; +"505 Mul_404" -> "506 Add_405" [label="[]", style=solid]; +"506 Add_405" -> "507 QuantizeLinear_626_1" [label="[]", style=solid]; +"507 QuantizeLinear_626_1" -> "508 DequantizeLinear_626_1" [label="[]", style=dashed]; +"508 DequantizeLinear_626_1" -> "509 Shape_406" [label="[]", style=solid]; +"508 DequantizeLinear_626_1" -> "512 Shape_409" [label="[]", style=solid]; +"508 DequantizeLinear_626_1" -> "515 Shape_412" [label="[]", style=solid]; +"508 DequantizeLinear_626_1" -> "521 Reshape_417" [label="[]", style=solid]; +"509 Shape_406" -> "511 Gather_408" [label="[-1]", style=dashed]; +"510 Constant_407" -> "511 Gather_408" [label="[]", style=dashed]; +"511 Gather_408" -> "526 Unsqueeze_419" [label="[]", style=dashed]; +"512 Shape_409" -> "514 Gather_411" [label="[-1]", style=dashed]; +"513 Constant_410" -> "514 Gather_411" [label="[]", style=dashed]; +"514 Gather_411" -> "528 Unsqueeze_420" [label="[]", style=dashed]; +"515 Shape_412" -> "517 Gather_414" [label="[-1]", style=dashed]; +"516 Constant_413" -> "517 Gather_414" [label="[]", style=dashed]; +"517 Gather_414" -> "519 Unsqueeze_415" [label="[]", style=dashed]; +"518 Constant_nncf_480" -> "519 Unsqueeze_415" [label="[1]", style=dashed]; +"519 Unsqueeze_415" -> "520 Concat_416" [label="[1]", style=dashed]; +"520 Concat_416" -> "521 
Reshape_417" [label="[2]", style=dashed]; +"521 Reshape_417" -> "524 Gemm_418" [label="[]", style=solid]; +"522 QuantizeLinear_h.1.mlp.c_fc.weight_1" -> "523 DequantizeLinear_h.1.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"523 DequantizeLinear_h.1.mlp.c_fc.weight_1" -> "524 Gemm_418" [label="[768, 3072]", style=solid]; +"524 Gemm_418" -> "530 Reshape_422" [label="[]", style=solid]; +"525 Constant_nncf_485" -> "526 Unsqueeze_419" [label="[1]", style=dashed]; +"526 Unsqueeze_419" -> "529 Concat_421" [label="[1]", style=dashed]; +"527 Constant_nncf_487" -> "528 Unsqueeze_420" [label="[1]", style=dashed]; +"528 Unsqueeze_420" -> "529 Concat_421" [label="[1]", style=dashed]; +"529 Concat_421" -> "530 Reshape_422" [label="[3]", style=dashed]; +"530 Reshape_422" -> "532 Mul_424" [label="[]", style=solid]; +"530 Reshape_422" -> "534 Pow_426" [label="[]", style=solid]; +"530 Reshape_422" -> "537 Add_429" [label="[]", style=solid]; +"531 Constant_423" -> "532 Mul_424" [label="[]", style=solid]; +"532 Mul_424" -> "543 Mul_435" [label="[]", style=solid]; +"533 Constant_425" -> "534 Pow_426" [label="[]", style=solid]; +"534 Pow_426" -> "536 Mul_428" [label="[]", style=solid]; +"535 Constant_427" -> "536 Mul_428" [label="[]", style=solid]; +"536 Mul_428" -> "537 Add_429" [label="[]", style=solid]; +"537 Add_429" -> "539 Mul_431" [label="[]", style=solid]; +"538 Constant_430" -> "539 Mul_431" [label="[]", style=solid]; +"539 Mul_431" -> "540 Tanh_432" [label="[]", style=solid]; +"540 Tanh_432" -> "542 Add_434" [label="[]", style=solid]; +"541 Constant_433" -> "542 Add_434" [label="[]", style=solid]; +"542 Add_434" -> "543 Mul_435" [label="[]", style=solid]; +"543 Mul_435" -> "544 QuantizeLinear_660_1" [label="[]", style=solid]; +"544 QuantizeLinear_660_1" -> "545 DequantizeLinear_660_1" [label="[]", style=dashed]; +"545 DequantizeLinear_660_1" -> "546 Shape_436" [label="[]", style=solid]; +"545 DequantizeLinear_660_1" -> "549 Shape_439" [label="[]", style=solid]; +"545 DequantizeLinear_660_1" -> "552 Shape_442" [label="[]", style=solid]; +"545 DequantizeLinear_660_1" -> "558 Reshape_447" [label="[]", style=solid]; +"546 Shape_436" -> "548 Gather_438" [label="[-1]", style=dashed]; +"547 Constant_437" -> "548 Gather_438" [label="[]", style=dashed]; +"548 Gather_438" -> "563 Unsqueeze_449" [label="[]", style=dashed]; +"549 Shape_439" -> "551 Gather_441" [label="[-1]", style=dashed]; +"550 Constant_440" -> "551 Gather_441" [label="[]", style=dashed]; +"551 Gather_441" -> "565 Unsqueeze_450" [label="[]", style=dashed]; +"552 Shape_442" -> "554 Gather_444" [label="[-1]", style=dashed]; +"553 Constant_443" -> "554 Gather_444" [label="[]", style=dashed]; +"554 Gather_444" -> "556 Unsqueeze_445" [label="[]", style=dashed]; +"555 Constant_nncf_513" -> "556 Unsqueeze_445" [label="[1]", style=dashed]; +"556 Unsqueeze_445" -> "557 Concat_446" [label="[1]", style=dashed]; +"557 Concat_446" -> "558 Reshape_447" [label="[2]", style=dashed]; +"558 Reshape_447" -> "561 Gemm_448" [label="[]", style=solid]; +"559 QuantizeLinear_h.1.mlp.c_proj.weight_1" -> "560 DequantizeLinear_h.1.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"560 DequantizeLinear_h.1.mlp.c_proj.weight_1" -> "561 Gemm_448" [label="[3072, 768]", style=solid]; +"561 Gemm_448" -> "567 Reshape_452" [label="[]", style=solid]; +"562 Constant_nncf_518" -> "563 Unsqueeze_449" [label="[1]", style=dashed]; +"563 Unsqueeze_449" -> "566 Concat_451" [label="[1]", style=dashed]; +"564 Constant_nncf_520" -> "565 Unsqueeze_450" [label="[1]", 
style=dashed]; +"565 Unsqueeze_450" -> "566 Concat_451" [label="[1]", style=dashed]; +"566 Concat_451" -> "567 Reshape_452" [label="[3]", style=dashed]; +"567 Reshape_452" -> "568 Add_453" [label="[]", style=solid]; +"568 Add_453" -> "569 ReduceMean_454" [label="[]", style=solid]; +"568 Add_453" -> "570 Sub_455" [label="[]", style=solid]; +"568 Add_453" -> "759 Add_605" [label="[]", style=solid]; +"569 ReduceMean_454" -> "570 Sub_455" [label="[]", style=solid]; +"570 Sub_455" -> "572 Pow_457" [label="[]", style=solid]; +"570 Sub_455" -> "577 Div_462" [label="[]", style=solid]; +"571 Constant_456" -> "572 Pow_457" [label="[]", style=solid]; +"572 Pow_457" -> "573 ReduceMean_458" [label="[]", style=solid]; +"573 ReduceMean_458" -> "575 Add_460" [label="[]", style=solid]; +"574 Constant_459" -> "575 Add_460" [label="[]", style=solid]; +"575 Add_460" -> "576 Sqrt_461" [label="[]", style=solid]; +"576 Sqrt_461" -> "577 Div_462" [label="[]", style=solid]; +"577 Div_462" -> "578 Mul_463" [label="[]", style=solid]; +"578 Mul_463" -> "579 Add_464" [label="[]", style=solid]; +"579 Add_464" -> "580 QuantizeLinear_693_1" [label="[]", style=solid]; +"580 QuantizeLinear_693_1" -> "581 DequantizeLinear_693_1" [label="[]", style=dashed]; +"581 DequantizeLinear_693_1" -> "582 Shape_465" [label="[]", style=solid]; +"581 DequantizeLinear_693_1" -> "585 Shape_468" [label="[]", style=solid]; +"581 DequantizeLinear_693_1" -> "588 Shape_471" [label="[]", style=solid]; +"581 DequantizeLinear_693_1" -> "594 Reshape_476" [label="[]", style=solid]; +"582 Shape_465" -> "584 Gather_467" [label="[-1]", style=dashed]; +"583 Constant_466" -> "584 Gather_467" [label="[]", style=dashed]; +"584 Gather_467" -> "599 Unsqueeze_478" [label="[]", style=dashed]; +"585 Shape_468" -> "587 Gather_470" [label="[-1]", style=dashed]; +"586 Constant_469" -> "587 Gather_470" [label="[]", style=dashed]; +"587 Gather_470" -> "601 Unsqueeze_479" [label="[]", style=dashed]; +"588 Shape_471" -> "590 Gather_473" [label="[-1]", style=dashed]; +"589 Constant_472" -> "590 Gather_473" [label="[]", style=dashed]; +"590 Gather_473" -> "592 Unsqueeze_474" [label="[]", style=dashed]; +"591 Constant_nncf_545" -> "592 Unsqueeze_474" [label="[1]", style=dashed]; +"592 Unsqueeze_474" -> "593 Concat_475" [label="[1]", style=dashed]; +"593 Concat_475" -> "594 Reshape_476" [label="[2]", style=dashed]; +"594 Reshape_476" -> "597 Gemm_477" [label="[]", style=solid]; +"595 QuantizeLinear_h.2.attn.c_attn.weight_1" -> "596 DequantizeLinear_h.2.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"596 DequantizeLinear_h.2.attn.c_attn.weight_1" -> "597 Gemm_477" [label="[768, 2304]", style=solid]; +"597 Gemm_477" -> "603 Reshape_481" [label="[]", style=solid]; +"598 Constant_nncf_550" -> "599 Unsqueeze_478" [label="[1]", style=dashed]; +"599 Unsqueeze_478" -> "602 Concat_480" [label="[1]", style=dashed]; +"600 Constant_nncf_552" -> "601 Unsqueeze_479" [label="[1]", style=dashed]; +"601 Unsqueeze_479" -> "602 Concat_480" [label="[1]", style=dashed]; +"602 Concat_480" -> "603 Reshape_481" [label="[3]", style=dashed]; +"603 Reshape_481" -> "605 Split_482" [label="[]", style=solid]; +"604 Constant_nncf_556" -> "605 Split_482" [label="[3]", style=dashed]; +"605 Split_482" -> "606 QuantizeLinear_query.5_1" [label="[]", style=solid]; +"605 Split_482" -> "608 Shape_483" [label="[]", style=solid]; +"605 Split_482" -> "611 Shape_486" [label="[]", style=solid]; +"605 Split_482" -> "614 Shape_489" [label="[]", style=solid]; +"605 Split_482" -> "630 Shape_502" 
[label="[]", style=solid]; +"605 Split_482" -> "633 Shape_505" [label="[]", style=solid]; +"605 Split_482" -> "636 Shape_508" [label="[]", style=solid]; +"605 Split_482" -> "650 Reshape_519" [label="[]", style=solid]; +"605 Split_482" -> "654 Shape_521" [label="[]", style=solid]; +"605 Split_482" -> "657 Shape_524" [label="[]", style=solid]; +"605 Split_482" -> "660 Shape_527" [label="[]", style=solid]; +"605 Split_482" -> "674 Reshape_538" [label="[]", style=solid]; +"606 QuantizeLinear_query.5_1" -> "607 DequantizeLinear_query.5_1" [label="[]", style=dashed]; +"607 DequantizeLinear_query.5_1" -> "628 Reshape_500" [label="[]", style=solid]; +"608 Shape_483" -> "610 Gather_485" [label="[-1]", style=dashed]; +"609 Constant_484" -> "610 Gather_485" [label="[]", style=dashed]; +"610 Gather_485" -> "622 Unsqueeze_496" [label="[]", style=dashed]; +"611 Shape_486" -> "613 Gather_488" [label="[-1]", style=dashed]; +"612 Constant_487" -> "613 Gather_488" [label="[]", style=dashed]; +"613 Gather_488" -> "624 Unsqueeze_497" [label="[]", style=dashed]; +"614 Shape_489" -> "616 Gather_491" [label="[-1]", style=dashed]; +"615 Constant_490" -> "616 Gather_491" [label="[]", style=dashed]; +"616 Gather_491" -> "618 Div_493" [label="[]", style=dashed]; +"617 Constant_492" -> "618 Div_493" [label="[]", style=dashed]; +"618 Div_493" -> "619 Cast_494" [label="[]", style=dashed]; +"619 Cast_494" -> "620 Cast_495" [label="[]", style=dashed]; +"620 Cast_495" -> "626 Unsqueeze_498" [label="[]", style=dashed]; +"621 Constant_nncf_571" -> "622 Unsqueeze_496" [label="[1]", style=dashed]; +"622 Unsqueeze_496" -> "627 Concat_499" [label="[1]", style=dashed]; +"623 Constant_nncf_573" -> "624 Unsqueeze_497" [label="[1]", style=dashed]; +"624 Unsqueeze_497" -> "627 Concat_499" [label="[1]", style=dashed]; +"625 Constant_nncf_575" -> "626 Unsqueeze_498" [label="[1]", style=dashed]; +"626 Unsqueeze_498" -> "627 Concat_499" [label="[1]", style=dashed]; +"627 Concat_499" -> "628 Reshape_500" [label="[4]", style=dashed]; +"628 Reshape_500" -> "629 Transpose_501" [label="[]", style=solid]; +"629 Transpose_501" -> "682 MatMul_544" [label="[]", style=solid]; +"630 Shape_502" -> "632 Gather_504" [label="[-1]", style=dashed]; +"631 Constant_503" -> "632 Gather_504" [label="[]", style=dashed]; +"632 Gather_504" -> "644 Unsqueeze_515" [label="[]", style=dashed]; +"633 Shape_505" -> "635 Gather_507" [label="[-1]", style=dashed]; +"634 Constant_506" -> "635 Gather_507" [label="[]", style=dashed]; +"635 Gather_507" -> "646 Unsqueeze_516" [label="[]", style=dashed]; +"636 Shape_508" -> "638 Gather_510" [label="[-1]", style=dashed]; +"637 Constant_509" -> "638 Gather_510" [label="[]", style=dashed]; +"638 Gather_510" -> "640 Div_512" [label="[]", style=dashed]; +"639 Constant_511" -> "640 Div_512" [label="[]", style=dashed]; +"640 Div_512" -> "641 Cast_513" [label="[]", style=dashed]; +"641 Cast_513" -> "642 Cast_514" [label="[]", style=dashed]; +"642 Cast_514" -> "648 Unsqueeze_517" [label="[]", style=dashed]; +"643 Constant_nncf_593" -> "644 Unsqueeze_515" [label="[1]", style=dashed]; +"644 Unsqueeze_515" -> "649 Concat_518" [label="[1]", style=dashed]; +"645 Constant_nncf_595" -> "646 Unsqueeze_516" [label="[1]", style=dashed]; +"646 Unsqueeze_516" -> "649 Concat_518" [label="[1]", style=dashed]; +"647 Constant_nncf_597" -> "648 Unsqueeze_517" [label="[1]", style=dashed]; +"648 Unsqueeze_517" -> "649 Concat_518" [label="[1]", style=dashed]; +"649 Concat_518" -> "650 Reshape_519" [label="[4]", style=dashed]; +"650 Reshape_519" -> "651 
QuantizeLinear_758_1" [label="[]", style=solid]; +"650 Reshape_519" -> "676 Transpose_540" [label="[]", style=solid]; +"651 QuantizeLinear_758_1" -> "652 DequantizeLinear_758_1" [label="[]", style=dashed]; +"652 DequantizeLinear_758_1" -> "653 Transpose_520" [label="[]", style=solid]; +"653 Transpose_520" -> "682 MatMul_544" [label="[]", style=solid]; +"654 Shape_521" -> "656 Gather_523" [label="[-1]", style=dashed]; +"655 Constant_522" -> "656 Gather_523" [label="[]", style=dashed]; +"656 Gather_523" -> "668 Unsqueeze_534" [label="[]", style=dashed]; +"657 Shape_524" -> "659 Gather_526" [label="[-1]", style=dashed]; +"658 Constant_525" -> "659 Gather_526" [label="[]", style=dashed]; +"659 Gather_526" -> "670 Unsqueeze_535" [label="[]", style=dashed]; +"660 Shape_527" -> "662 Gather_529" [label="[-1]", style=dashed]; +"661 Constant_528" -> "662 Gather_529" [label="[]", style=dashed]; +"662 Gather_529" -> "664 Div_531" [label="[]", style=dashed]; +"663 Constant_530" -> "664 Div_531" [label="[]", style=dashed]; +"664 Div_531" -> "665 Cast_532" [label="[]", style=dashed]; +"665 Cast_532" -> "666 Cast_533" [label="[]", style=dashed]; +"666 Cast_533" -> "672 Unsqueeze_536" [label="[]", style=dashed]; +"667 Constant_nncf_615" -> "668 Unsqueeze_534" [label="[1]", style=dashed]; +"668 Unsqueeze_534" -> "673 Concat_537" [label="[1]", style=dashed]; +"669 Constant_nncf_617" -> "670 Unsqueeze_535" [label="[1]", style=dashed]; +"670 Unsqueeze_535" -> "673 Concat_537" [label="[1]", style=dashed]; +"671 Constant_nncf_619" -> "672 Unsqueeze_536" [label="[1]", style=dashed]; +"672 Unsqueeze_536" -> "673 Concat_537" [label="[1]", style=dashed]; +"673 Concat_537" -> "674 Reshape_538" [label="[4]", style=dashed]; +"674 Reshape_538" -> "675 Transpose_539" [label="[]", style=solid]; +"675 Transpose_539" -> "680 Unsqueeze_542" [label="[]", style=solid]; +"675 Transpose_539" -> "712 MatMul_568" [label="[]", style=solid]; +"676 Transpose_540" -> "678 Unsqueeze_541" [label="[]", style=solid]; +"677 Constant_nncf_625" -> "678 Unsqueeze_541" [label="[1]", style=dashed]; +"678 Unsqueeze_541" -> "681 Concat_543" [label="[]", style=solid]; +"679 Constant_nncf_627" -> "680 Unsqueeze_542" [label="[1]", style=dashed]; +"680 Unsqueeze_542" -> "681 Concat_543" [label="[]", style=solid]; +"681 Concat_543" -> "3234 nncf_model_output_3" [label="[2, 1, 12, 8, 64]", style=solid]; +"682 MatMul_544" -> "684 Div_546" [label="[]", style=solid]; +"683 Constant_545" -> "684 Div_546" [label="[]", style=solid]; +"684 Div_546" -> "685 Shape_547" [label="[]", style=solid]; +"684 Div_546" -> "688 Shape_550" [label="[]", style=solid]; +"684 Div_546" -> "702 Mul_561" [label="[]", style=solid]; +"685 Shape_547" -> "687 Gather_549" [label="[-1]", style=dashed]; +"686 Constant_548" -> "687 Gather_549" [label="[]", style=dashed]; +"687 Gather_549" -> "691 Sub_553" [label="[]", style=dashed]; +"688 Shape_550" -> "690 Gather_552" [label="[-1]", style=dashed]; +"689 Constant_551" -> "690 Gather_552" [label="[]", style=dashed]; +"690 Gather_552" -> "691 Sub_553" [label="[]", style=dashed]; +"690 Gather_552" -> "695 Unsqueeze_555" [label="[]", style=dashed]; +"690 Gather_552" -> "699 Unsqueeze_558" [label="[]", style=dashed]; +"691 Sub_553" -> "693 Unsqueeze_554" [label="[]", style=dashed]; +"692 Constant_nncf_640" -> "693 Unsqueeze_554" [label="[1]", style=dashed]; +"693 Unsqueeze_554" -> "697 Slice_557" [label="[1]", style=dashed]; +"694 Constant_nncf_642" -> "695 Unsqueeze_555" [label="[1]", style=dashed]; +"695 Unsqueeze_555" -> "697 Slice_557" 
[label="[1]", style=dashed]; +"696 Constant_556" -> "697 Slice_557" [label="[1]", style=dashed]; +"697 Slice_557" -> "701 Slice_560" [label="[-1, -1, -1, -1]", style=solid]; +"698 Constant_nncf_646" -> "699 Unsqueeze_558" [label="[1]", style=dashed]; +"699 Unsqueeze_558" -> "701 Slice_560" [label="[1]", style=dashed]; +"700 Constant_559" -> "701 Slice_560" [label="[1]", style=dashed]; +"701 Slice_560" -> "702 Mul_561" [label="[-1, -1, -1, -1]", style=solid]; +"701 Slice_560" -> "704 Sub_563" [label="[-1, -1, -1, -1]", style=solid]; +"702 Mul_561" -> "707 Sub_566" [label="[]", style=solid]; +"703 Constant_562" -> "704 Sub_563" [label="[]", style=solid]; +"704 Sub_563" -> "706 Mul_565" [label="[-1, -1, -1, -1]", style=solid]; +"705 Constant_564" -> "706 Mul_565" [label="[]", style=solid]; +"706 Mul_565" -> "707 Sub_566" [label="[-1, -1, -1, -1]", style=solid]; +"707 Sub_566" -> "708 Shape_nncf_656" [label="[]", style=solid]; +"707 Sub_566" -> "709 Flatten_nncf_657" [label="[]", style=solid]; +"708 Shape_nncf_656" -> "711 Reshape_nncf_659" [label="[-1]", style=dashed]; +"709 Flatten_nncf_657" -> "710 Softmax_567" [label="[]", style=solid]; +"710 Softmax_567" -> "711 Reshape_nncf_659" [label="[]", style=solid]; +"711 Reshape_nncf_659" -> "712 MatMul_568" [label="[]", style=solid]; +"712 MatMul_568" -> "713 QuantizeLinear_815_1" [label="[]", style=solid]; +"713 QuantizeLinear_815_1" -> "714 DequantizeLinear_815_1" [label="[]", style=dashed]; +"714 DequantizeLinear_815_1" -> "715 Transpose_569" [label="[]", style=solid]; +"715 Transpose_569" -> "716 Shape_570" [label="[]", style=solid]; +"715 Transpose_569" -> "719 Shape_573" [label="[]", style=solid]; +"715 Transpose_569" -> "722 Shape_576" [label="[]", style=solid]; +"715 Transpose_569" -> "725 Shape_579" [label="[]", style=solid]; +"715 Transpose_569" -> "736 Reshape_587" [label="[]", style=solid]; +"716 Shape_570" -> "718 Gather_572" [label="[-1]", style=dashed]; +"717 Constant_571" -> "718 Gather_572" [label="[]", style=dashed]; +"718 Gather_572" -> "730 Unsqueeze_583" [label="[]", style=dashed]; +"719 Shape_573" -> "721 Gather_575" [label="[-1]", style=dashed]; +"720 Constant_574" -> "721 Gather_575" [label="[]", style=dashed]; +"721 Gather_575" -> "732 Unsqueeze_584" [label="[]", style=dashed]; +"722 Shape_576" -> "724 Gather_578" [label="[-1]", style=dashed]; +"723 Constant_577" -> "724 Gather_578" [label="[]", style=dashed]; +"724 Gather_578" -> "728 Mul_582" [label="[]", style=dashed]; +"725 Shape_579" -> "727 Gather_581" [label="[-1]", style=dashed]; +"726 Constant_580" -> "727 Gather_581" [label="[]", style=dashed]; +"727 Gather_581" -> "728 Mul_582" [label="[]", style=dashed]; +"728 Mul_582" -> "734 Unsqueeze_585" [label="[]", style=dashed]; +"729 Constant_nncf_675" -> "730 Unsqueeze_583" [label="[1]", style=dashed]; +"730 Unsqueeze_583" -> "735 Concat_586" [label="[1]", style=dashed]; +"731 Constant_nncf_677" -> "732 Unsqueeze_584" [label="[1]", style=dashed]; +"732 Unsqueeze_584" -> "735 Concat_586" [label="[1]", style=dashed]; +"733 Constant_nncf_679" -> "734 Unsqueeze_585" [label="[1]", style=dashed]; +"734 Unsqueeze_585" -> "735 Concat_586" [label="[1]", style=dashed]; +"735 Concat_586" -> "736 Reshape_587" [label="[3]", style=dashed]; +"736 Reshape_587" -> "737 Shape_588" [label="[]", style=solid]; +"736 Reshape_587" -> "740 Shape_591" [label="[]", style=solid]; +"736 Reshape_587" -> "743 Shape_594" [label="[]", style=solid]; +"736 Reshape_587" -> "749 Reshape_599" [label="[]", style=solid]; +"737 Shape_588" -> "739 
Gather_590" [label="[-1]", style=dashed]; +"738 Constant_589" -> "739 Gather_590" [label="[]", style=dashed]; +"739 Gather_590" -> "754 Unsqueeze_601" [label="[]", style=dashed]; +"740 Shape_591" -> "742 Gather_593" [label="[-1]", style=dashed]; +"741 Constant_592" -> "742 Gather_593" [label="[]", style=dashed]; +"742 Gather_593" -> "756 Unsqueeze_602" [label="[]", style=dashed]; +"743 Shape_594" -> "745 Gather_596" [label="[-1]", style=dashed]; +"744 Constant_595" -> "745 Gather_596" [label="[]", style=dashed]; +"745 Gather_596" -> "747 Unsqueeze_597" [label="[]", style=dashed]; +"746 Constant_nncf_692" -> "747 Unsqueeze_597" [label="[1]", style=dashed]; +"747 Unsqueeze_597" -> "748 Concat_598" [label="[1]", style=dashed]; +"748 Concat_598" -> "749 Reshape_599" [label="[2]", style=dashed]; +"749 Reshape_599" -> "752 Gemm_600" [label="[]", style=solid]; +"750 QuantizeLinear_h.2.attn.c_proj.weight_1" -> "751 DequantizeLinear_h.2.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"751 DequantizeLinear_h.2.attn.c_proj.weight_1" -> "752 Gemm_600" [label="[768, 768]", style=solid]; +"752 Gemm_600" -> "758 Reshape_604" [label="[]", style=solid]; +"753 Constant_nncf_697" -> "754 Unsqueeze_601" [label="[1]", style=dashed]; +"754 Unsqueeze_601" -> "757 Concat_603" [label="[1]", style=dashed]; +"755 Constant_nncf_699" -> "756 Unsqueeze_602" [label="[1]", style=dashed]; +"756 Unsqueeze_602" -> "757 Concat_603" [label="[1]", style=dashed]; +"757 Concat_603" -> "758 Reshape_604" [label="[3]", style=dashed]; +"758 Reshape_604" -> "759 Add_605" [label="[]", style=solid]; +"759 Add_605" -> "760 ReduceMean_606" [label="[]", style=solid]; +"759 Add_605" -> "761 Sub_607" [label="[]", style=solid]; +"759 Add_605" -> "832 Add_664" [label="[]", style=solid]; +"760 ReduceMean_606" -> "761 Sub_607" [label="[]", style=solid]; +"761 Sub_607" -> "763 Pow_609" [label="[]", style=solid]; +"761 Sub_607" -> "768 Div_614" [label="[]", style=solid]; +"762 Constant_608" -> "763 Pow_609" [label="[]", style=solid]; +"763 Pow_609" -> "764 ReduceMean_610" [label="[]", style=solid]; +"764 ReduceMean_610" -> "766 Add_612" [label="[]", style=solid]; +"765 Constant_611" -> "766 Add_612" [label="[]", style=solid]; +"766 Add_612" -> "767 Sqrt_613" [label="[]", style=solid]; +"767 Sqrt_613" -> "768 Div_614" [label="[]", style=solid]; +"768 Div_614" -> "769 Mul_615" [label="[]", style=solid]; +"769 Mul_615" -> "770 Add_616" [label="[]", style=solid]; +"770 Add_616" -> "771 QuantizeLinear_867_1" [label="[]", style=solid]; +"771 QuantizeLinear_867_1" -> "772 DequantizeLinear_867_1" [label="[]", style=dashed]; +"772 DequantizeLinear_867_1" -> "773 Shape_617" [label="[]", style=solid]; +"772 DequantizeLinear_867_1" -> "776 Shape_620" [label="[]", style=solid]; +"772 DequantizeLinear_867_1" -> "779 Shape_623" [label="[]", style=solid]; +"772 DequantizeLinear_867_1" -> "785 Reshape_628" [label="[]", style=solid]; +"773 Shape_617" -> "775 Gather_619" [label="[-1]", style=dashed]; +"774 Constant_618" -> "775 Gather_619" [label="[]", style=dashed]; +"775 Gather_619" -> "790 Unsqueeze_630" [label="[]", style=dashed]; +"776 Shape_620" -> "778 Gather_622" [label="[-1]", style=dashed]; +"777 Constant_621" -> "778 Gather_622" [label="[]", style=dashed]; +"778 Gather_622" -> "792 Unsqueeze_631" [label="[]", style=dashed]; +"779 Shape_623" -> "781 Gather_625" [label="[-1]", style=dashed]; +"780 Constant_624" -> "781 Gather_625" [label="[]", style=dashed]; +"781 Gather_625" -> "783 Unsqueeze_626" [label="[]", style=dashed]; +"782 
Constant_nncf_724" -> "783 Unsqueeze_626" [label="[1]", style=dashed]; +"783 Unsqueeze_626" -> "784 Concat_627" [label="[1]", style=dashed]; +"784 Concat_627" -> "785 Reshape_628" [label="[2]", style=dashed]; +"785 Reshape_628" -> "788 Gemm_629" [label="[]", style=solid]; +"786 QuantizeLinear_h.2.mlp.c_fc.weight_1" -> "787 DequantizeLinear_h.2.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"787 DequantizeLinear_h.2.mlp.c_fc.weight_1" -> "788 Gemm_629" [label="[768, 3072]", style=solid]; +"788 Gemm_629" -> "794 Reshape_633" [label="[]", style=solid]; +"789 Constant_nncf_729" -> "790 Unsqueeze_630" [label="[1]", style=dashed]; +"790 Unsqueeze_630" -> "793 Concat_632" [label="[1]", style=dashed]; +"791 Constant_nncf_731" -> "792 Unsqueeze_631" [label="[1]", style=dashed]; +"792 Unsqueeze_631" -> "793 Concat_632" [label="[1]", style=dashed]; +"793 Concat_632" -> "794 Reshape_633" [label="[3]", style=dashed]; +"794 Reshape_633" -> "796 Mul_635" [label="[]", style=solid]; +"794 Reshape_633" -> "798 Pow_637" [label="[]", style=solid]; +"794 Reshape_633" -> "801 Add_640" [label="[]", style=solid]; +"795 Constant_634" -> "796 Mul_635" [label="[]", style=solid]; +"796 Mul_635" -> "807 Mul_646" [label="[]", style=solid]; +"797 Constant_636" -> "798 Pow_637" [label="[]", style=solid]; +"798 Pow_637" -> "800 Mul_639" [label="[]", style=solid]; +"799 Constant_638" -> "800 Mul_639" [label="[]", style=solid]; +"800 Mul_639" -> "801 Add_640" [label="[]", style=solid]; +"801 Add_640" -> "803 Mul_642" [label="[]", style=solid]; +"802 Constant_641" -> "803 Mul_642" [label="[]", style=solid]; +"803 Mul_642" -> "804 Tanh_643" [label="[]", style=solid]; +"804 Tanh_643" -> "806 Add_645" [label="[]", style=solid]; +"805 Constant_644" -> "806 Add_645" [label="[]", style=solid]; +"806 Add_645" -> "807 Mul_646" [label="[]", style=solid]; +"807 Mul_646" -> "808 QuantizeLinear_901_1" [label="[]", style=solid]; +"808 QuantizeLinear_901_1" -> "809 DequantizeLinear_901_1" [label="[]", style=dashed]; +"809 DequantizeLinear_901_1" -> "810 Shape_647" [label="[]", style=solid]; +"809 DequantizeLinear_901_1" -> "813 Shape_650" [label="[]", style=solid]; +"809 DequantizeLinear_901_1" -> "816 Shape_653" [label="[]", style=solid]; +"809 DequantizeLinear_901_1" -> "822 Reshape_658" [label="[]", style=solid]; +"810 Shape_647" -> "812 Gather_649" [label="[-1]", style=dashed]; +"811 Constant_648" -> "812 Gather_649" [label="[]", style=dashed]; +"812 Gather_649" -> "827 Unsqueeze_660" [label="[]", style=dashed]; +"813 Shape_650" -> "815 Gather_652" [label="[-1]", style=dashed]; +"814 Constant_651" -> "815 Gather_652" [label="[]", style=dashed]; +"815 Gather_652" -> "829 Unsqueeze_661" [label="[]", style=dashed]; +"816 Shape_653" -> "818 Gather_655" [label="[-1]", style=dashed]; +"817 Constant_654" -> "818 Gather_655" [label="[]", style=dashed]; +"818 Gather_655" -> "820 Unsqueeze_656" [label="[]", style=dashed]; +"819 Constant_nncf_757" -> "820 Unsqueeze_656" [label="[1]", style=dashed]; +"820 Unsqueeze_656" -> "821 Concat_657" [label="[1]", style=dashed]; +"821 Concat_657" -> "822 Reshape_658" [label="[2]", style=dashed]; +"822 Reshape_658" -> "825 Gemm_659" [label="[]", style=solid]; +"823 QuantizeLinear_h.2.mlp.c_proj.weight_1" -> "824 DequantizeLinear_h.2.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"824 DequantizeLinear_h.2.mlp.c_proj.weight_1" -> "825 Gemm_659" [label="[3072, 768]", style=solid]; +"825 Gemm_659" -> "831 Reshape_663" [label="[]", style=solid]; +"826 Constant_nncf_762" -> "827 
Unsqueeze_660" [label="[1]", style=dashed]; +"827 Unsqueeze_660" -> "830 Concat_662" [label="[1]", style=dashed]; +"828 Constant_nncf_764" -> "829 Unsqueeze_661" [label="[1]", style=dashed]; +"829 Unsqueeze_661" -> "830 Concat_662" [label="[1]", style=dashed]; +"830 Concat_662" -> "831 Reshape_663" [label="[3]", style=dashed]; +"831 Reshape_663" -> "832 Add_664" [label="[]", style=solid]; +"832 Add_664" -> "833 ReduceMean_665" [label="[]", style=solid]; +"832 Add_664" -> "834 Sub_666" [label="[]", style=solid]; +"832 Add_664" -> "1023 Add_816" [label="[]", style=solid]; +"833 ReduceMean_665" -> "834 Sub_666" [label="[]", style=solid]; +"834 Sub_666" -> "836 Pow_668" [label="[]", style=solid]; +"834 Sub_666" -> "841 Div_673" [label="[]", style=solid]; +"835 Constant_667" -> "836 Pow_668" [label="[]", style=solid]; +"836 Pow_668" -> "837 ReduceMean_669" [label="[]", style=solid]; +"837 ReduceMean_669" -> "839 Add_671" [label="[]", style=solid]; +"838 Constant_670" -> "839 Add_671" [label="[]", style=solid]; +"839 Add_671" -> "840 Sqrt_672" [label="[]", style=solid]; +"840 Sqrt_672" -> "841 Div_673" [label="[]", style=solid]; +"841 Div_673" -> "842 Mul_674" [label="[]", style=solid]; +"842 Mul_674" -> "843 Add_675" [label="[]", style=solid]; +"843 Add_675" -> "844 QuantizeLinear_934_1" [label="[]", style=solid]; +"844 QuantizeLinear_934_1" -> "845 DequantizeLinear_934_1" [label="[]", style=dashed]; +"845 DequantizeLinear_934_1" -> "846 Shape_676" [label="[]", style=solid]; +"845 DequantizeLinear_934_1" -> "849 Shape_679" [label="[]", style=solid]; +"845 DequantizeLinear_934_1" -> "852 Shape_682" [label="[]", style=solid]; +"845 DequantizeLinear_934_1" -> "858 Reshape_687" [label="[]", style=solid]; +"846 Shape_676" -> "848 Gather_678" [label="[-1]", style=dashed]; +"847 Constant_677" -> "848 Gather_678" [label="[]", style=dashed]; +"848 Gather_678" -> "863 Unsqueeze_689" [label="[]", style=dashed]; +"849 Shape_679" -> "851 Gather_681" [label="[-1]", style=dashed]; +"850 Constant_680" -> "851 Gather_681" [label="[]", style=dashed]; +"851 Gather_681" -> "865 Unsqueeze_690" [label="[]", style=dashed]; +"852 Shape_682" -> "854 Gather_684" [label="[-1]", style=dashed]; +"853 Constant_683" -> "854 Gather_684" [label="[]", style=dashed]; +"854 Gather_684" -> "856 Unsqueeze_685" [label="[]", style=dashed]; +"855 Constant_nncf_789" -> "856 Unsqueeze_685" [label="[1]", style=dashed]; +"856 Unsqueeze_685" -> "857 Concat_686" [label="[1]", style=dashed]; +"857 Concat_686" -> "858 Reshape_687" [label="[2]", style=dashed]; +"858 Reshape_687" -> "861 Gemm_688" [label="[]", style=solid]; +"859 QuantizeLinear_h.3.attn.c_attn.weight_1" -> "860 DequantizeLinear_h.3.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"860 DequantizeLinear_h.3.attn.c_attn.weight_1" -> "861 Gemm_688" [label="[768, 2304]", style=solid]; +"861 Gemm_688" -> "867 Reshape_692" [label="[]", style=solid]; +"862 Constant_nncf_794" -> "863 Unsqueeze_689" [label="[1]", style=dashed]; +"863 Unsqueeze_689" -> "866 Concat_691" [label="[1]", style=dashed]; +"864 Constant_nncf_796" -> "865 Unsqueeze_690" [label="[1]", style=dashed]; +"865 Unsqueeze_690" -> "866 Concat_691" [label="[1]", style=dashed]; +"866 Concat_691" -> "867 Reshape_692" [label="[3]", style=dashed]; +"867 Reshape_692" -> "869 Split_693" [label="[]", style=solid]; +"868 Constant_nncf_800" -> "869 Split_693" [label="[3]", style=dashed]; +"869 Split_693" -> "870 QuantizeLinear_query.7_1" [label="[]", style=solid]; +"869 Split_693" -> "872 Shape_694" [label="[]", 
style=solid]; +"869 Split_693" -> "875 Shape_697" [label="[]", style=solid]; +"869 Split_693" -> "878 Shape_700" [label="[]", style=solid]; +"869 Split_693" -> "894 Shape_713" [label="[]", style=solid]; +"869 Split_693" -> "897 Shape_716" [label="[]", style=solid]; +"869 Split_693" -> "900 Shape_719" [label="[]", style=solid]; +"869 Split_693" -> "914 Reshape_730" [label="[]", style=solid]; +"869 Split_693" -> "918 Shape_732" [label="[]", style=solid]; +"869 Split_693" -> "921 Shape_735" [label="[]", style=solid]; +"869 Split_693" -> "924 Shape_738" [label="[]", style=solid]; +"869 Split_693" -> "938 Reshape_749" [label="[]", style=solid]; +"870 QuantizeLinear_query.7_1" -> "871 DequantizeLinear_query.7_1" [label="[]", style=dashed]; +"871 DequantizeLinear_query.7_1" -> "892 Reshape_711" [label="[]", style=solid]; +"872 Shape_694" -> "874 Gather_696" [label="[-1]", style=dashed]; +"873 Constant_695" -> "874 Gather_696" [label="[]", style=dashed]; +"874 Gather_696" -> "886 Unsqueeze_707" [label="[]", style=dashed]; +"875 Shape_697" -> "877 Gather_699" [label="[-1]", style=dashed]; +"876 Constant_698" -> "877 Gather_699" [label="[]", style=dashed]; +"877 Gather_699" -> "888 Unsqueeze_708" [label="[]", style=dashed]; +"878 Shape_700" -> "880 Gather_702" [label="[-1]", style=dashed]; +"879 Constant_701" -> "880 Gather_702" [label="[]", style=dashed]; +"880 Gather_702" -> "882 Div_704" [label="[]", style=dashed]; +"881 Constant_703" -> "882 Div_704" [label="[]", style=dashed]; +"882 Div_704" -> "883 Cast_705" [label="[]", style=dashed]; +"883 Cast_705" -> "884 Cast_706" [label="[]", style=dashed]; +"884 Cast_706" -> "890 Unsqueeze_709" [label="[]", style=dashed]; +"885 Constant_nncf_815" -> "886 Unsqueeze_707" [label="[1]", style=dashed]; +"886 Unsqueeze_707" -> "891 Concat_710" [label="[1]", style=dashed]; +"887 Constant_nncf_817" -> "888 Unsqueeze_708" [label="[1]", style=dashed]; +"888 Unsqueeze_708" -> "891 Concat_710" [label="[1]", style=dashed]; +"889 Constant_nncf_819" -> "890 Unsqueeze_709" [label="[1]", style=dashed]; +"890 Unsqueeze_709" -> "891 Concat_710" [label="[1]", style=dashed]; +"891 Concat_710" -> "892 Reshape_711" [label="[4]", style=dashed]; +"892 Reshape_711" -> "893 Transpose_712" [label="[]", style=solid]; +"893 Transpose_712" -> "946 MatMul_755" [label="[]", style=solid]; +"894 Shape_713" -> "896 Gather_715" [label="[-1]", style=dashed]; +"895 Constant_714" -> "896 Gather_715" [label="[]", style=dashed]; +"896 Gather_715" -> "908 Unsqueeze_726" [label="[]", style=dashed]; +"897 Shape_716" -> "899 Gather_718" [label="[-1]", style=dashed]; +"898 Constant_717" -> "899 Gather_718" [label="[]", style=dashed]; +"899 Gather_718" -> "910 Unsqueeze_727" [label="[]", style=dashed]; +"900 Shape_719" -> "902 Gather_721" [label="[-1]", style=dashed]; +"901 Constant_720" -> "902 Gather_721" [label="[]", style=dashed]; +"902 Gather_721" -> "904 Div_723" [label="[]", style=dashed]; +"903 Constant_722" -> "904 Div_723" [label="[]", style=dashed]; +"904 Div_723" -> "905 Cast_724" [label="[]", style=dashed]; +"905 Cast_724" -> "906 Cast_725" [label="[]", style=dashed]; +"906 Cast_725" -> "912 Unsqueeze_728" [label="[]", style=dashed]; +"907 Constant_nncf_837" -> "908 Unsqueeze_726" [label="[1]", style=dashed]; +"908 Unsqueeze_726" -> "913 Concat_729" [label="[1]", style=dashed]; +"909 Constant_nncf_839" -> "910 Unsqueeze_727" [label="[1]", style=dashed]; +"910 Unsqueeze_727" -> "913 Concat_729" [label="[1]", style=dashed]; +"911 Constant_nncf_841" -> "912 Unsqueeze_728" [label="[1]", 
style=dashed]; +"912 Unsqueeze_728" -> "913 Concat_729" [label="[1]", style=dashed]; +"913 Concat_729" -> "914 Reshape_730" [label="[4]", style=dashed]; +"914 Reshape_730" -> "915 QuantizeLinear_999_1" [label="[]", style=solid]; +"914 Reshape_730" -> "940 Transpose_751" [label="[]", style=solid]; +"915 QuantizeLinear_999_1" -> "916 DequantizeLinear_999_1" [label="[]", style=dashed]; +"916 DequantizeLinear_999_1" -> "917 Transpose_731" [label="[]", style=solid]; +"917 Transpose_731" -> "946 MatMul_755" [label="[]", style=solid]; +"918 Shape_732" -> "920 Gather_734" [label="[-1]", style=dashed]; +"919 Constant_733" -> "920 Gather_734" [label="[]", style=dashed]; +"920 Gather_734" -> "932 Unsqueeze_745" [label="[]", style=dashed]; +"921 Shape_735" -> "923 Gather_737" [label="[-1]", style=dashed]; +"922 Constant_736" -> "923 Gather_737" [label="[]", style=dashed]; +"923 Gather_737" -> "934 Unsqueeze_746" [label="[]", style=dashed]; +"924 Shape_738" -> "926 Gather_740" [label="[-1]", style=dashed]; +"925 Constant_739" -> "926 Gather_740" [label="[]", style=dashed]; +"926 Gather_740" -> "928 Div_742" [label="[]", style=dashed]; +"927 Constant_741" -> "928 Div_742" [label="[]", style=dashed]; +"928 Div_742" -> "929 Cast_743" [label="[]", style=dashed]; +"929 Cast_743" -> "930 Cast_744" [label="[]", style=dashed]; +"930 Cast_744" -> "936 Unsqueeze_747" [label="[]", style=dashed]; +"931 Constant_nncf_859" -> "932 Unsqueeze_745" [label="[1]", style=dashed]; +"932 Unsqueeze_745" -> "937 Concat_748" [label="[1]", style=dashed]; +"933 Constant_nncf_861" -> "934 Unsqueeze_746" [label="[1]", style=dashed]; +"934 Unsqueeze_746" -> "937 Concat_748" [label="[1]", style=dashed]; +"935 Constant_nncf_863" -> "936 Unsqueeze_747" [label="[1]", style=dashed]; +"936 Unsqueeze_747" -> "937 Concat_748" [label="[1]", style=dashed]; +"937 Concat_748" -> "938 Reshape_749" [label="[4]", style=dashed]; +"938 Reshape_749" -> "939 Transpose_750" [label="[]", style=solid]; +"939 Transpose_750" -> "944 Unsqueeze_753" [label="[]", style=solid]; +"939 Transpose_750" -> "976 MatMul_779" [label="[]", style=solid]; +"940 Transpose_751" -> "942 Unsqueeze_752" [label="[]", style=solid]; +"941 Constant_nncf_869" -> "942 Unsqueeze_752" [label="[1]", style=dashed]; +"942 Unsqueeze_752" -> "945 Concat_754" [label="[]", style=solid]; +"943 Constant_nncf_871" -> "944 Unsqueeze_753" [label="[1]", style=dashed]; +"944 Unsqueeze_753" -> "945 Concat_754" [label="[]", style=solid]; +"945 Concat_754" -> "3235 nncf_model_output_4" [label="[2, 1, 12, 8, 64]", style=solid]; +"946 MatMul_755" -> "948 Div_757" [label="[]", style=solid]; +"947 Constant_756" -> "948 Div_757" [label="[]", style=solid]; +"948 Div_757" -> "949 Shape_758" [label="[]", style=solid]; +"948 Div_757" -> "952 Shape_761" [label="[]", style=solid]; +"948 Div_757" -> "966 Mul_772" [label="[]", style=solid]; +"949 Shape_758" -> "951 Gather_760" [label="[-1]", style=dashed]; +"950 Constant_759" -> "951 Gather_760" [label="[]", style=dashed]; +"951 Gather_760" -> "955 Sub_764" [label="[]", style=dashed]; +"952 Shape_761" -> "954 Gather_763" [label="[-1]", style=dashed]; +"953 Constant_762" -> "954 Gather_763" [label="[]", style=dashed]; +"954 Gather_763" -> "955 Sub_764" [label="[]", style=dashed]; +"954 Gather_763" -> "959 Unsqueeze_766" [label="[]", style=dashed]; +"954 Gather_763" -> "963 Unsqueeze_769" [label="[]", style=dashed]; +"955 Sub_764" -> "957 Unsqueeze_765" [label="[]", style=dashed]; +"956 Constant_nncf_884" -> "957 Unsqueeze_765" [label="[1]", style=dashed]; +"957 
Unsqueeze_765" -> "961 Slice_768" [label="[1]", style=dashed]; +"958 Constant_nncf_886" -> "959 Unsqueeze_766" [label="[1]", style=dashed]; +"959 Unsqueeze_766" -> "961 Slice_768" [label="[1]", style=dashed]; +"960 Constant_767" -> "961 Slice_768" [label="[1]", style=dashed]; +"961 Slice_768" -> "965 Slice_771" [label="[-1, -1, -1, -1]", style=solid]; +"962 Constant_nncf_890" -> "963 Unsqueeze_769" [label="[1]", style=dashed]; +"963 Unsqueeze_769" -> "965 Slice_771" [label="[1]", style=dashed]; +"964 Constant_770" -> "965 Slice_771" [label="[1]", style=dashed]; +"965 Slice_771" -> "966 Mul_772" [label="[-1, -1, -1, -1]", style=solid]; +"965 Slice_771" -> "968 Sub_774" [label="[-1, -1, -1, -1]", style=solid]; +"966 Mul_772" -> "971 Sub_777" [label="[]", style=solid]; +"967 Constant_773" -> "968 Sub_774" [label="[]", style=solid]; +"968 Sub_774" -> "970 Mul_776" [label="[-1, -1, -1, -1]", style=solid]; +"969 Constant_775" -> "970 Mul_776" [label="[]", style=solid]; +"970 Mul_776" -> "971 Sub_777" [label="[-1, -1, -1, -1]", style=solid]; +"971 Sub_777" -> "972 Shape_nncf_900" [label="[]", style=solid]; +"971 Sub_777" -> "973 Flatten_nncf_901" [label="[]", style=solid]; +"972 Shape_nncf_900" -> "975 Reshape_nncf_903" [label="[-1]", style=dashed]; +"973 Flatten_nncf_901" -> "974 Softmax_778" [label="[]", style=solid]; +"974 Softmax_778" -> "975 Reshape_nncf_903" [label="[]", style=solid]; +"975 Reshape_nncf_903" -> "976 MatMul_779" [label="[]", style=solid]; +"976 MatMul_779" -> "977 QuantizeLinear_1056_1" [label="[]", style=solid]; +"977 QuantizeLinear_1056_1" -> "978 DequantizeLinear_1056_1" [label="[]", style=dashed]; +"978 DequantizeLinear_1056_1" -> "979 Transpose_780" [label="[]", style=solid]; +"979 Transpose_780" -> "980 Shape_781" [label="[]", style=solid]; +"979 Transpose_780" -> "983 Shape_784" [label="[]", style=solid]; +"979 Transpose_780" -> "986 Shape_787" [label="[]", style=solid]; +"979 Transpose_780" -> "989 Shape_790" [label="[]", style=solid]; +"979 Transpose_780" -> "1000 Reshape_798" [label="[]", style=solid]; +"980 Shape_781" -> "982 Gather_783" [label="[-1]", style=dashed]; +"981 Constant_782" -> "982 Gather_783" [label="[]", style=dashed]; +"982 Gather_783" -> "994 Unsqueeze_794" [label="[]", style=dashed]; +"983 Shape_784" -> "985 Gather_786" [label="[-1]", style=dashed]; +"984 Constant_785" -> "985 Gather_786" [label="[]", style=dashed]; +"985 Gather_786" -> "996 Unsqueeze_795" [label="[]", style=dashed]; +"986 Shape_787" -> "988 Gather_789" [label="[-1]", style=dashed]; +"987 Constant_788" -> "988 Gather_789" [label="[]", style=dashed]; +"988 Gather_789" -> "992 Mul_793" [label="[]", style=dashed]; +"989 Shape_790" -> "991 Gather_792" [label="[-1]", style=dashed]; +"990 Constant_791" -> "991 Gather_792" [label="[]", style=dashed]; +"991 Gather_792" -> "992 Mul_793" [label="[]", style=dashed]; +"992 Mul_793" -> "998 Unsqueeze_796" [label="[]", style=dashed]; +"993 Constant_nncf_919" -> "994 Unsqueeze_794" [label="[1]", style=dashed]; +"994 Unsqueeze_794" -> "999 Concat_797" [label="[1]", style=dashed]; +"995 Constant_nncf_921" -> "996 Unsqueeze_795" [label="[1]", style=dashed]; +"996 Unsqueeze_795" -> "999 Concat_797" [label="[1]", style=dashed]; +"997 Constant_nncf_923" -> "998 Unsqueeze_796" [label="[1]", style=dashed]; +"998 Unsqueeze_796" -> "999 Concat_797" [label="[1]", style=dashed]; +"999 Concat_797" -> "1000 Reshape_798" [label="[3]", style=dashed]; +"1000 Reshape_798" -> "1001 Shape_799" [label="[]", style=solid]; +"1000 Reshape_798" -> "1004 Shape_802" 
[label="[]", style=solid]; +"1000 Reshape_798" -> "1007 Shape_805" [label="[]", style=solid]; +"1000 Reshape_798" -> "1013 Reshape_810" [label="[]", style=solid]; +"1001 Shape_799" -> "1003 Gather_801" [label="[-1]", style=dashed]; +"1002 Constant_800" -> "1003 Gather_801" [label="[]", style=dashed]; +"1003 Gather_801" -> "1018 Unsqueeze_812" [label="[]", style=dashed]; +"1004 Shape_802" -> "1006 Gather_804" [label="[-1]", style=dashed]; +"1005 Constant_803" -> "1006 Gather_804" [label="[]", style=dashed]; +"1006 Gather_804" -> "1020 Unsqueeze_813" [label="[]", style=dashed]; +"1007 Shape_805" -> "1009 Gather_807" [label="[-1]", style=dashed]; +"1008 Constant_806" -> "1009 Gather_807" [label="[]", style=dashed]; +"1009 Gather_807" -> "1011 Unsqueeze_808" [label="[]", style=dashed]; +"1010 Constant_nncf_936" -> "1011 Unsqueeze_808" [label="[1]", style=dashed]; +"1011 Unsqueeze_808" -> "1012 Concat_809" [label="[1]", style=dashed]; +"1012 Concat_809" -> "1013 Reshape_810" [label="[2]", style=dashed]; +"1013 Reshape_810" -> "1016 Gemm_811" [label="[]", style=solid]; +"1014 QuantizeLinear_h.3.attn.c_proj.weight_1" -> "1015 DequantizeLinear_h.3.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"1015 DequantizeLinear_h.3.attn.c_proj.weight_1" -> "1016 Gemm_811" [label="[768, 768]", style=solid]; +"1016 Gemm_811" -> "1022 Reshape_815" [label="[]", style=solid]; +"1017 Constant_nncf_941" -> "1018 Unsqueeze_812" [label="[1]", style=dashed]; +"1018 Unsqueeze_812" -> "1021 Concat_814" [label="[1]", style=dashed]; +"1019 Constant_nncf_943" -> "1020 Unsqueeze_813" [label="[1]", style=dashed]; +"1020 Unsqueeze_813" -> "1021 Concat_814" [label="[1]", style=dashed]; +"1021 Concat_814" -> "1022 Reshape_815" [label="[3]", style=dashed]; +"1022 Reshape_815" -> "1023 Add_816" [label="[]", style=solid]; +"1023 Add_816" -> "1024 ReduceMean_817" [label="[]", style=solid]; +"1023 Add_816" -> "1025 Sub_818" [label="[]", style=solid]; +"1023 Add_816" -> "1096 Add_875" [label="[]", style=solid]; +"1024 ReduceMean_817" -> "1025 Sub_818" [label="[]", style=solid]; +"1025 Sub_818" -> "1027 Pow_820" [label="[]", style=solid]; +"1025 Sub_818" -> "1032 Div_825" [label="[]", style=solid]; +"1026 Constant_819" -> "1027 Pow_820" [label="[]", style=solid]; +"1027 Pow_820" -> "1028 ReduceMean_821" [label="[]", style=solid]; +"1028 ReduceMean_821" -> "1030 Add_823" [label="[]", style=solid]; +"1029 Constant_822" -> "1030 Add_823" [label="[]", style=solid]; +"1030 Add_823" -> "1031 Sqrt_824" [label="[]", style=solid]; +"1031 Sqrt_824" -> "1032 Div_825" [label="[]", style=solid]; +"1032 Div_825" -> "1033 Mul_826" [label="[]", style=solid]; +"1033 Mul_826" -> "1034 Add_827" [label="[]", style=solid]; +"1034 Add_827" -> "1035 QuantizeLinear_1108_1" [label="[]", style=solid]; +"1035 QuantizeLinear_1108_1" -> "1036 DequantizeLinear_1108_1" [label="[]", style=dashed]; +"1036 DequantizeLinear_1108_1" -> "1037 Shape_828" [label="[]", style=solid]; +"1036 DequantizeLinear_1108_1" -> "1040 Shape_831" [label="[]", style=solid]; +"1036 DequantizeLinear_1108_1" -> "1043 Shape_834" [label="[]", style=solid]; +"1036 DequantizeLinear_1108_1" -> "1049 Reshape_839" [label="[]", style=solid]; +"1037 Shape_828" -> "1039 Gather_830" [label="[-1]", style=dashed]; +"1038 Constant_829" -> "1039 Gather_830" [label="[]", style=dashed]; +"1039 Gather_830" -> "1054 Unsqueeze_841" [label="[]", style=dashed]; +"1040 Shape_831" -> "1042 Gather_833" [label="[-1]", style=dashed]; +"1041 Constant_832" -> "1042 Gather_833" [label="[]", style=dashed]; 
+"1042 Gather_833" -> "1056 Unsqueeze_842" [label="[]", style=dashed]; +"1043 Shape_834" -> "1045 Gather_836" [label="[-1]", style=dashed]; +"1044 Constant_835" -> "1045 Gather_836" [label="[]", style=dashed]; +"1045 Gather_836" -> "1047 Unsqueeze_837" [label="[]", style=dashed]; +"1046 Constant_nncf_968" -> "1047 Unsqueeze_837" [label="[1]", style=dashed]; +"1047 Unsqueeze_837" -> "1048 Concat_838" [label="[1]", style=dashed]; +"1048 Concat_838" -> "1049 Reshape_839" [label="[2]", style=dashed]; +"1049 Reshape_839" -> "1052 Gemm_840" [label="[]", style=solid]; +"1050 QuantizeLinear_h.3.mlp.c_fc.weight_1" -> "1051 DequantizeLinear_h.3.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"1051 DequantizeLinear_h.3.mlp.c_fc.weight_1" -> "1052 Gemm_840" [label="[768, 3072]", style=solid]; +"1052 Gemm_840" -> "1058 Reshape_844" [label="[]", style=solid]; +"1053 Constant_nncf_973" -> "1054 Unsqueeze_841" [label="[1]", style=dashed]; +"1054 Unsqueeze_841" -> "1057 Concat_843" [label="[1]", style=dashed]; +"1055 Constant_nncf_975" -> "1056 Unsqueeze_842" [label="[1]", style=dashed]; +"1056 Unsqueeze_842" -> "1057 Concat_843" [label="[1]", style=dashed]; +"1057 Concat_843" -> "1058 Reshape_844" [label="[3]", style=dashed]; +"1058 Reshape_844" -> "1060 Mul_846" [label="[]", style=solid]; +"1058 Reshape_844" -> "1062 Pow_848" [label="[]", style=solid]; +"1058 Reshape_844" -> "1065 Add_851" [label="[]", style=solid]; +"1059 Constant_845" -> "1060 Mul_846" [label="[]", style=solid]; +"1060 Mul_846" -> "1071 Mul_857" [label="[]", style=solid]; +"1061 Constant_847" -> "1062 Pow_848" [label="[]", style=solid]; +"1062 Pow_848" -> "1064 Mul_850" [label="[]", style=solid]; +"1063 Constant_849" -> "1064 Mul_850" [label="[]", style=solid]; +"1064 Mul_850" -> "1065 Add_851" [label="[]", style=solid]; +"1065 Add_851" -> "1067 Mul_853" [label="[]", style=solid]; +"1066 Constant_852" -> "1067 Mul_853" [label="[]", style=solid]; +"1067 Mul_853" -> "1068 Tanh_854" [label="[]", style=solid]; +"1068 Tanh_854" -> "1070 Add_856" [label="[]", style=solid]; +"1069 Constant_855" -> "1070 Add_856" [label="[]", style=solid]; +"1070 Add_856" -> "1071 Mul_857" [label="[]", style=solid]; +"1071 Mul_857" -> "1072 QuantizeLinear_1142_1" [label="[]", style=solid]; +"1072 QuantizeLinear_1142_1" -> "1073 DequantizeLinear_1142_1" [label="[]", style=dashed]; +"1073 DequantizeLinear_1142_1" -> "1074 Shape_858" [label="[]", style=solid]; +"1073 DequantizeLinear_1142_1" -> "1077 Shape_861" [label="[]", style=solid]; +"1073 DequantizeLinear_1142_1" -> "1080 Shape_864" [label="[]", style=solid]; +"1073 DequantizeLinear_1142_1" -> "1086 Reshape_869" [label="[]", style=solid]; +"1074 Shape_858" -> "1076 Gather_860" [label="[-1]", style=dashed]; +"1075 Constant_859" -> "1076 Gather_860" [label="[]", style=dashed]; +"1076 Gather_860" -> "1091 Unsqueeze_871" [label="[]", style=dashed]; +"1077 Shape_861" -> "1079 Gather_863" [label="[-1]", style=dashed]; +"1078 Constant_862" -> "1079 Gather_863" [label="[]", style=dashed]; +"1079 Gather_863" -> "1093 Unsqueeze_872" [label="[]", style=dashed]; +"1080 Shape_864" -> "1082 Gather_866" [label="[-1]", style=dashed]; +"1081 Constant_865" -> "1082 Gather_866" [label="[]", style=dashed]; +"1082 Gather_866" -> "1084 Unsqueeze_867" [label="[]", style=dashed]; +"1083 Constant_nncf_1001" -> "1084 Unsqueeze_867" [label="[1]", style=dashed]; +"1084 Unsqueeze_867" -> "1085 Concat_868" [label="[1]", style=dashed]; +"1085 Concat_868" -> "1086 Reshape_869" [label="[2]", style=dashed]; +"1086 Reshape_869" -> 
"1089 Gemm_870" [label="[]", style=solid]; +"1087 QuantizeLinear_h.3.mlp.c_proj.weight_1" -> "1088 DequantizeLinear_h.3.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"1088 DequantizeLinear_h.3.mlp.c_proj.weight_1" -> "1089 Gemm_870" [label="[3072, 768]", style=solid]; +"1089 Gemm_870" -> "1095 Reshape_874" [label="[]", style=solid]; +"1090 Constant_nncf_1006" -> "1091 Unsqueeze_871" [label="[1]", style=dashed]; +"1091 Unsqueeze_871" -> "1094 Concat_873" [label="[1]", style=dashed]; +"1092 Constant_nncf_1008" -> "1093 Unsqueeze_872" [label="[1]", style=dashed]; +"1093 Unsqueeze_872" -> "1094 Concat_873" [label="[1]", style=dashed]; +"1094 Concat_873" -> "1095 Reshape_874" [label="[3]", style=dashed]; +"1095 Reshape_874" -> "1096 Add_875" [label="[]", style=solid]; +"1096 Add_875" -> "1097 ReduceMean_876" [label="[]", style=solid]; +"1096 Add_875" -> "1098 Sub_877" [label="[]", style=solid]; +"1096 Add_875" -> "1287 Add_1027" [label="[]", style=solid]; +"1097 ReduceMean_876" -> "1098 Sub_877" [label="[]", style=solid]; +"1098 Sub_877" -> "1100 Pow_879" [label="[]", style=solid]; +"1098 Sub_877" -> "1105 Div_884" [label="[]", style=solid]; +"1099 Constant_878" -> "1100 Pow_879" [label="[]", style=solid]; +"1100 Pow_879" -> "1101 ReduceMean_880" [label="[]", style=solid]; +"1101 ReduceMean_880" -> "1103 Add_882" [label="[]", style=solid]; +"1102 Constant_881" -> "1103 Add_882" [label="[]", style=solid]; +"1103 Add_882" -> "1104 Sqrt_883" [label="[]", style=solid]; +"1104 Sqrt_883" -> "1105 Div_884" [label="[]", style=solid]; +"1105 Div_884" -> "1106 Mul_885" [label="[]", style=solid]; +"1106 Mul_885" -> "1107 Add_886" [label="[]", style=solid]; +"1107 Add_886" -> "1108 QuantizeLinear_1175_1" [label="[]", style=solid]; +"1108 QuantizeLinear_1175_1" -> "1109 DequantizeLinear_1175_1" [label="[]", style=dashed]; +"1109 DequantizeLinear_1175_1" -> "1110 Shape_887" [label="[]", style=solid]; +"1109 DequantizeLinear_1175_1" -> "1113 Shape_890" [label="[]", style=solid]; +"1109 DequantizeLinear_1175_1" -> "1116 Shape_893" [label="[]", style=solid]; +"1109 DequantizeLinear_1175_1" -> "1122 Reshape_898" [label="[]", style=solid]; +"1110 Shape_887" -> "1112 Gather_889" [label="[-1]", style=dashed]; +"1111 Constant_888" -> "1112 Gather_889" [label="[]", style=dashed]; +"1112 Gather_889" -> "1127 Unsqueeze_900" [label="[]", style=dashed]; +"1113 Shape_890" -> "1115 Gather_892" [label="[-1]", style=dashed]; +"1114 Constant_891" -> "1115 Gather_892" [label="[]", style=dashed]; +"1115 Gather_892" -> "1129 Unsqueeze_901" [label="[]", style=dashed]; +"1116 Shape_893" -> "1118 Gather_895" [label="[-1]", style=dashed]; +"1117 Constant_894" -> "1118 Gather_895" [label="[]", style=dashed]; +"1118 Gather_895" -> "1120 Unsqueeze_896" [label="[]", style=dashed]; +"1119 Constant_nncf_1033" -> "1120 Unsqueeze_896" [label="[1]", style=dashed]; +"1120 Unsqueeze_896" -> "1121 Concat_897" [label="[1]", style=dashed]; +"1121 Concat_897" -> "1122 Reshape_898" [label="[2]", style=dashed]; +"1122 Reshape_898" -> "1125 Gemm_899" [label="[]", style=solid]; +"1123 QuantizeLinear_h.4.attn.c_attn.weight_1" -> "1124 DequantizeLinear_h.4.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"1124 DequantizeLinear_h.4.attn.c_attn.weight_1" -> "1125 Gemm_899" [label="[768, 2304]", style=solid]; +"1125 Gemm_899" -> "1131 Reshape_903" [label="[]", style=solid]; +"1126 Constant_nncf_1038" -> "1127 Unsqueeze_900" [label="[1]", style=dashed]; +"1127 Unsqueeze_900" -> "1130 Concat_902" [label="[1]", style=dashed]; +"1128 
Constant_nncf_1040" -> "1129 Unsqueeze_901" [label="[1]", style=dashed]; +"1129 Unsqueeze_901" -> "1130 Concat_902" [label="[1]", style=dashed]; +"1130 Concat_902" -> "1131 Reshape_903" [label="[3]", style=dashed]; +"1131 Reshape_903" -> "1133 Split_904" [label="[]", style=solid]; +"1132 Constant_nncf_1044" -> "1133 Split_904" [label="[3]", style=dashed]; +"1133 Split_904" -> "1134 QuantizeLinear_query.9_1" [label="[]", style=solid]; +"1133 Split_904" -> "1136 Shape_905" [label="[]", style=solid]; +"1133 Split_904" -> "1139 Shape_908" [label="[]", style=solid]; +"1133 Split_904" -> "1142 Shape_911" [label="[]", style=solid]; +"1133 Split_904" -> "1158 Shape_924" [label="[]", style=solid]; +"1133 Split_904" -> "1161 Shape_927" [label="[]", style=solid]; +"1133 Split_904" -> "1164 Shape_930" [label="[]", style=solid]; +"1133 Split_904" -> "1178 Reshape_941" [label="[]", style=solid]; +"1133 Split_904" -> "1182 Shape_943" [label="[]", style=solid]; +"1133 Split_904" -> "1185 Shape_946" [label="[]", style=solid]; +"1133 Split_904" -> "1188 Shape_949" [label="[]", style=solid]; +"1133 Split_904" -> "1202 Reshape_960" [label="[]", style=solid]; +"1134 QuantizeLinear_query.9_1" -> "1135 DequantizeLinear_query.9_1" [label="[]", style=dashed]; +"1135 DequantizeLinear_query.9_1" -> "1156 Reshape_922" [label="[]", style=solid]; +"1136 Shape_905" -> "1138 Gather_907" [label="[-1]", style=dashed]; +"1137 Constant_906" -> "1138 Gather_907" [label="[]", style=dashed]; +"1138 Gather_907" -> "1150 Unsqueeze_918" [label="[]", style=dashed]; +"1139 Shape_908" -> "1141 Gather_910" [label="[-1]", style=dashed]; +"1140 Constant_909" -> "1141 Gather_910" [label="[]", style=dashed]; +"1141 Gather_910" -> "1152 Unsqueeze_919" [label="[]", style=dashed]; +"1142 Shape_911" -> "1144 Gather_913" [label="[-1]", style=dashed]; +"1143 Constant_912" -> "1144 Gather_913" [label="[]", style=dashed]; +"1144 Gather_913" -> "1146 Div_915" [label="[]", style=dashed]; +"1145 Constant_914" -> "1146 Div_915" [label="[]", style=dashed]; +"1146 Div_915" -> "1147 Cast_916" [label="[]", style=dashed]; +"1147 Cast_916" -> "1148 Cast_917" [label="[]", style=dashed]; +"1148 Cast_917" -> "1154 Unsqueeze_920" [label="[]", style=dashed]; +"1149 Constant_nncf_1059" -> "1150 Unsqueeze_918" [label="[1]", style=dashed]; +"1150 Unsqueeze_918" -> "1155 Concat_921" [label="[1]", style=dashed]; +"1151 Constant_nncf_1061" -> "1152 Unsqueeze_919" [label="[1]", style=dashed]; +"1152 Unsqueeze_919" -> "1155 Concat_921" [label="[1]", style=dashed]; +"1153 Constant_nncf_1063" -> "1154 Unsqueeze_920" [label="[1]", style=dashed]; +"1154 Unsqueeze_920" -> "1155 Concat_921" [label="[1]", style=dashed]; +"1155 Concat_921" -> "1156 Reshape_922" [label="[4]", style=dashed]; +"1156 Reshape_922" -> "1157 Transpose_923" [label="[]", style=solid]; +"1157 Transpose_923" -> "1210 MatMul_966" [label="[]", style=solid]; +"1158 Shape_924" -> "1160 Gather_926" [label="[-1]", style=dashed]; +"1159 Constant_925" -> "1160 Gather_926" [label="[]", style=dashed]; +"1160 Gather_926" -> "1172 Unsqueeze_937" [label="[]", style=dashed]; +"1161 Shape_927" -> "1163 Gather_929" [label="[-1]", style=dashed]; +"1162 Constant_928" -> "1163 Gather_929" [label="[]", style=dashed]; +"1163 Gather_929" -> "1174 Unsqueeze_938" [label="[]", style=dashed]; +"1164 Shape_930" -> "1166 Gather_932" [label="[-1]", style=dashed]; +"1165 Constant_931" -> "1166 Gather_932" [label="[]", style=dashed]; +"1166 Gather_932" -> "1168 Div_934" [label="[]", style=dashed]; +"1167 Constant_933" -> "1168 
Div_934" [label="[]", style=dashed]; +"1168 Div_934" -> "1169 Cast_935" [label="[]", style=dashed]; +"1169 Cast_935" -> "1170 Cast_936" [label="[]", style=dashed]; +"1170 Cast_936" -> "1176 Unsqueeze_939" [label="[]", style=dashed]; +"1171 Constant_nncf_1081" -> "1172 Unsqueeze_937" [label="[1]", style=dashed]; +"1172 Unsqueeze_937" -> "1177 Concat_940" [label="[1]", style=dashed]; +"1173 Constant_nncf_1083" -> "1174 Unsqueeze_938" [label="[1]", style=dashed]; +"1174 Unsqueeze_938" -> "1177 Concat_940" [label="[1]", style=dashed]; +"1175 Constant_nncf_1085" -> "1176 Unsqueeze_939" [label="[1]", style=dashed]; +"1176 Unsqueeze_939" -> "1177 Concat_940" [label="[1]", style=dashed]; +"1177 Concat_940" -> "1178 Reshape_941" [label="[4]", style=dashed]; +"1178 Reshape_941" -> "1179 QuantizeLinear_1240_1" [label="[]", style=solid]; +"1178 Reshape_941" -> "1204 Transpose_962" [label="[]", style=solid]; +"1179 QuantizeLinear_1240_1" -> "1180 DequantizeLinear_1240_1" [label="[]", style=dashed]; +"1180 DequantizeLinear_1240_1" -> "1181 Transpose_942" [label="[]", style=solid]; +"1181 Transpose_942" -> "1210 MatMul_966" [label="[]", style=solid]; +"1182 Shape_943" -> "1184 Gather_945" [label="[-1]", style=dashed]; +"1183 Constant_944" -> "1184 Gather_945" [label="[]", style=dashed]; +"1184 Gather_945" -> "1196 Unsqueeze_956" [label="[]", style=dashed]; +"1185 Shape_946" -> "1187 Gather_948" [label="[-1]", style=dashed]; +"1186 Constant_947" -> "1187 Gather_948" [label="[]", style=dashed]; +"1187 Gather_948" -> "1198 Unsqueeze_957" [label="[]", style=dashed]; +"1188 Shape_949" -> "1190 Gather_951" [label="[-1]", style=dashed]; +"1189 Constant_950" -> "1190 Gather_951" [label="[]", style=dashed]; +"1190 Gather_951" -> "1192 Div_953" [label="[]", style=dashed]; +"1191 Constant_952" -> "1192 Div_953" [label="[]", style=dashed]; +"1192 Div_953" -> "1193 Cast_954" [label="[]", style=dashed]; +"1193 Cast_954" -> "1194 Cast_955" [label="[]", style=dashed]; +"1194 Cast_955" -> "1200 Unsqueeze_958" [label="[]", style=dashed]; +"1195 Constant_nncf_1103" -> "1196 Unsqueeze_956" [label="[1]", style=dashed]; +"1196 Unsqueeze_956" -> "1201 Concat_959" [label="[1]", style=dashed]; +"1197 Constant_nncf_1105" -> "1198 Unsqueeze_957" [label="[1]", style=dashed]; +"1198 Unsqueeze_957" -> "1201 Concat_959" [label="[1]", style=dashed]; +"1199 Constant_nncf_1107" -> "1200 Unsqueeze_958" [label="[1]", style=dashed]; +"1200 Unsqueeze_958" -> "1201 Concat_959" [label="[1]", style=dashed]; +"1201 Concat_959" -> "1202 Reshape_960" [label="[4]", style=dashed]; +"1202 Reshape_960" -> "1203 Transpose_961" [label="[]", style=solid]; +"1203 Transpose_961" -> "1208 Unsqueeze_964" [label="[]", style=solid]; +"1203 Transpose_961" -> "1240 MatMul_990" [label="[]", style=solid]; +"1204 Transpose_962" -> "1206 Unsqueeze_963" [label="[]", style=solid]; +"1205 Constant_nncf_1113" -> "1206 Unsqueeze_963" [label="[1]", style=dashed]; +"1206 Unsqueeze_963" -> "1209 Concat_965" [label="[]", style=solid]; +"1207 Constant_nncf_1115" -> "1208 Unsqueeze_964" [label="[1]", style=dashed]; +"1208 Unsqueeze_964" -> "1209 Concat_965" [label="[]", style=solid]; +"1209 Concat_965" -> "3236 nncf_model_output_5" [label="[2, 1, 12, 8, 64]", style=solid]; +"1210 MatMul_966" -> "1212 Div_968" [label="[]", style=solid]; +"1211 Constant_967" -> "1212 Div_968" [label="[]", style=solid]; +"1212 Div_968" -> "1213 Shape_969" [label="[]", style=solid]; +"1212 Div_968" -> "1216 Shape_972" [label="[]", style=solid]; +"1212 Div_968" -> "1230 Mul_983" [label="[]", 
style=solid]; +"1213 Shape_969" -> "1215 Gather_971" [label="[-1]", style=dashed]; +"1214 Constant_970" -> "1215 Gather_971" [label="[]", style=dashed]; +"1215 Gather_971" -> "1219 Sub_975" [label="[]", style=dashed]; +"1216 Shape_972" -> "1218 Gather_974" [label="[-1]", style=dashed]; +"1217 Constant_973" -> "1218 Gather_974" [label="[]", style=dashed]; +"1218 Gather_974" -> "1219 Sub_975" [label="[]", style=dashed]; +"1218 Gather_974" -> "1223 Unsqueeze_977" [label="[]", style=dashed]; +"1218 Gather_974" -> "1227 Unsqueeze_980" [label="[]", style=dashed]; +"1219 Sub_975" -> "1221 Unsqueeze_976" [label="[]", style=dashed]; +"1220 Constant_nncf_1128" -> "1221 Unsqueeze_976" [label="[1]", style=dashed]; +"1221 Unsqueeze_976" -> "1225 Slice_979" [label="[1]", style=dashed]; +"1222 Constant_nncf_1130" -> "1223 Unsqueeze_977" [label="[1]", style=dashed]; +"1223 Unsqueeze_977" -> "1225 Slice_979" [label="[1]", style=dashed]; +"1224 Constant_978" -> "1225 Slice_979" [label="[1]", style=dashed]; +"1225 Slice_979" -> "1229 Slice_982" [label="[-1, -1, -1, -1]", style=solid]; +"1226 Constant_nncf_1134" -> "1227 Unsqueeze_980" [label="[1]", style=dashed]; +"1227 Unsqueeze_980" -> "1229 Slice_982" [label="[1]", style=dashed]; +"1228 Constant_981" -> "1229 Slice_982" [label="[1]", style=dashed]; +"1229 Slice_982" -> "1230 Mul_983" [label="[-1, -1, -1, -1]", style=solid]; +"1229 Slice_982" -> "1232 Sub_985" [label="[-1, -1, -1, -1]", style=solid]; +"1230 Mul_983" -> "1235 Sub_988" [label="[]", style=solid]; +"1231 Constant_984" -> "1232 Sub_985" [label="[]", style=solid]; +"1232 Sub_985" -> "1234 Mul_987" [label="[-1, -1, -1, -1]", style=solid]; +"1233 Constant_986" -> "1234 Mul_987" [label="[]", style=solid]; +"1234 Mul_987" -> "1235 Sub_988" [label="[-1, -1, -1, -1]", style=solid]; +"1235 Sub_988" -> "1236 Shape_nncf_1144" [label="[]", style=solid]; +"1235 Sub_988" -> "1237 Flatten_nncf_1145" [label="[]", style=solid]; +"1236 Shape_nncf_1144" -> "1239 Reshape_nncf_1147" [label="[-1]", style=dashed]; +"1237 Flatten_nncf_1145" -> "1238 Softmax_989" [label="[]", style=solid]; +"1238 Softmax_989" -> "1239 Reshape_nncf_1147" [label="[]", style=solid]; +"1239 Reshape_nncf_1147" -> "1240 MatMul_990" [label="[]", style=solid]; +"1240 MatMul_990" -> "1241 QuantizeLinear_1297_1" [label="[]", style=solid]; +"1241 QuantizeLinear_1297_1" -> "1242 DequantizeLinear_1297_1" [label="[]", style=dashed]; +"1242 DequantizeLinear_1297_1" -> "1243 Transpose_991" [label="[]", style=solid]; +"1243 Transpose_991" -> "1244 Shape_992" [label="[]", style=solid]; +"1243 Transpose_991" -> "1247 Shape_995" [label="[]", style=solid]; +"1243 Transpose_991" -> "1250 Shape_998" [label="[]", style=solid]; +"1243 Transpose_991" -> "1253 Shape_1001" [label="[]", style=solid]; +"1243 Transpose_991" -> "1264 Reshape_1009" [label="[]", style=solid]; +"1244 Shape_992" -> "1246 Gather_994" [label="[-1]", style=dashed]; +"1245 Constant_993" -> "1246 Gather_994" [label="[]", style=dashed]; +"1246 Gather_994" -> "1258 Unsqueeze_1005" [label="[]", style=dashed]; +"1247 Shape_995" -> "1249 Gather_997" [label="[-1]", style=dashed]; +"1248 Constant_996" -> "1249 Gather_997" [label="[]", style=dashed]; +"1249 Gather_997" -> "1260 Unsqueeze_1006" [label="[]", style=dashed]; +"1250 Shape_998" -> "1252 Gather_1000" [label="[-1]", style=dashed]; +"1251 Constant_999" -> "1252 Gather_1000" [label="[]", style=dashed]; +"1252 Gather_1000" -> "1256 Mul_1004" [label="[]", style=dashed]; +"1253 Shape_1001" -> "1255 Gather_1003" [label="[-1]", style=dashed]; 
+"1254 Constant_1002" -> "1255 Gather_1003" [label="[]", style=dashed]; +"1255 Gather_1003" -> "1256 Mul_1004" [label="[]", style=dashed]; +"1256 Mul_1004" -> "1262 Unsqueeze_1007" [label="[]", style=dashed]; +"1257 Constant_nncf_1163" -> "1258 Unsqueeze_1005" [label="[1]", style=dashed]; +"1258 Unsqueeze_1005" -> "1263 Concat_1008" [label="[1]", style=dashed]; +"1259 Constant_nncf_1165" -> "1260 Unsqueeze_1006" [label="[1]", style=dashed]; +"1260 Unsqueeze_1006" -> "1263 Concat_1008" [label="[1]", style=dashed]; +"1261 Constant_nncf_1167" -> "1262 Unsqueeze_1007" [label="[1]", style=dashed]; +"1262 Unsqueeze_1007" -> "1263 Concat_1008" [label="[1]", style=dashed]; +"1263 Concat_1008" -> "1264 Reshape_1009" [label="[3]", style=dashed]; +"1264 Reshape_1009" -> "1265 Shape_1010" [label="[]", style=solid]; +"1264 Reshape_1009" -> "1268 Shape_1013" [label="[]", style=solid]; +"1264 Reshape_1009" -> "1271 Shape_1016" [label="[]", style=solid]; +"1264 Reshape_1009" -> "1277 Reshape_1021" [label="[]", style=solid]; +"1265 Shape_1010" -> "1267 Gather_1012" [label="[-1]", style=dashed]; +"1266 Constant_1011" -> "1267 Gather_1012" [label="[]", style=dashed]; +"1267 Gather_1012" -> "1282 Unsqueeze_1023" [label="[]", style=dashed]; +"1268 Shape_1013" -> "1270 Gather_1015" [label="[-1]", style=dashed]; +"1269 Constant_1014" -> "1270 Gather_1015" [label="[]", style=dashed]; +"1270 Gather_1015" -> "1284 Unsqueeze_1024" [label="[]", style=dashed]; +"1271 Shape_1016" -> "1273 Gather_1018" [label="[-1]", style=dashed]; +"1272 Constant_1017" -> "1273 Gather_1018" [label="[]", style=dashed]; +"1273 Gather_1018" -> "1275 Unsqueeze_1019" [label="[]", style=dashed]; +"1274 Constant_nncf_1180" -> "1275 Unsqueeze_1019" [label="[1]", style=dashed]; +"1275 Unsqueeze_1019" -> "1276 Concat_1020" [label="[1]", style=dashed]; +"1276 Concat_1020" -> "1277 Reshape_1021" [label="[2]", style=dashed]; +"1277 Reshape_1021" -> "1280 Gemm_1022" [label="[]", style=solid]; +"1278 QuantizeLinear_h.4.attn.c_proj.weight_1" -> "1279 DequantizeLinear_h.4.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"1279 DequantizeLinear_h.4.attn.c_proj.weight_1" -> "1280 Gemm_1022" [label="[768, 768]", style=solid]; +"1280 Gemm_1022" -> "1286 Reshape_1026" [label="[]", style=solid]; +"1281 Constant_nncf_1185" -> "1282 Unsqueeze_1023" [label="[1]", style=dashed]; +"1282 Unsqueeze_1023" -> "1285 Concat_1025" [label="[1]", style=dashed]; +"1283 Constant_nncf_1187" -> "1284 Unsqueeze_1024" [label="[1]", style=dashed]; +"1284 Unsqueeze_1024" -> "1285 Concat_1025" [label="[1]", style=dashed]; +"1285 Concat_1025" -> "1286 Reshape_1026" [label="[3]", style=dashed]; +"1286 Reshape_1026" -> "1287 Add_1027" [label="[]", style=solid]; +"1287 Add_1027" -> "1288 ReduceMean_1028" [label="[]", style=solid]; +"1287 Add_1027" -> "1289 Sub_1029" [label="[]", style=solid]; +"1287 Add_1027" -> "1360 Add_1086" [label="[]", style=solid]; +"1288 ReduceMean_1028" -> "1289 Sub_1029" [label="[]", style=solid]; +"1289 Sub_1029" -> "1291 Pow_1031" [label="[]", style=solid]; +"1289 Sub_1029" -> "1296 Div_1036" [label="[]", style=solid]; +"1290 Constant_1030" -> "1291 Pow_1031" [label="[]", style=solid]; +"1291 Pow_1031" -> "1292 ReduceMean_1032" [label="[]", style=solid]; +"1292 ReduceMean_1032" -> "1294 Add_1034" [label="[]", style=solid]; +"1293 Constant_1033" -> "1294 Add_1034" [label="[]", style=solid]; +"1294 Add_1034" -> "1295 Sqrt_1035" [label="[]", style=solid]; +"1295 Sqrt_1035" -> "1296 Div_1036" [label="[]", style=solid]; +"1296 Div_1036" -> "1297 
Mul_1037" [label="[]", style=solid]; +"1297 Mul_1037" -> "1298 Add_1038" [label="[]", style=solid]; +"1298 Add_1038" -> "1299 QuantizeLinear_1349_1" [label="[]", style=solid]; +"1299 QuantizeLinear_1349_1" -> "1300 DequantizeLinear_1349_1" [label="[]", style=dashed]; +"1300 DequantizeLinear_1349_1" -> "1301 Shape_1039" [label="[]", style=solid]; +"1300 DequantizeLinear_1349_1" -> "1304 Shape_1042" [label="[]", style=solid]; +"1300 DequantizeLinear_1349_1" -> "1307 Shape_1045" [label="[]", style=solid]; +"1300 DequantizeLinear_1349_1" -> "1313 Reshape_1050" [label="[]", style=solid]; +"1301 Shape_1039" -> "1303 Gather_1041" [label="[-1]", style=dashed]; +"1302 Constant_1040" -> "1303 Gather_1041" [label="[]", style=dashed]; +"1303 Gather_1041" -> "1318 Unsqueeze_1052" [label="[]", style=dashed]; +"1304 Shape_1042" -> "1306 Gather_1044" [label="[-1]", style=dashed]; +"1305 Constant_1043" -> "1306 Gather_1044" [label="[]", style=dashed]; +"1306 Gather_1044" -> "1320 Unsqueeze_1053" [label="[]", style=dashed]; +"1307 Shape_1045" -> "1309 Gather_1047" [label="[-1]", style=dashed]; +"1308 Constant_1046" -> "1309 Gather_1047" [label="[]", style=dashed]; +"1309 Gather_1047" -> "1311 Unsqueeze_1048" [label="[]", style=dashed]; +"1310 Constant_nncf_1212" -> "1311 Unsqueeze_1048" [label="[1]", style=dashed]; +"1311 Unsqueeze_1048" -> "1312 Concat_1049" [label="[1]", style=dashed]; +"1312 Concat_1049" -> "1313 Reshape_1050" [label="[2]", style=dashed]; +"1313 Reshape_1050" -> "1316 Gemm_1051" [label="[]", style=solid]; +"1314 QuantizeLinear_h.4.mlp.c_fc.weight_1" -> "1315 DequantizeLinear_h.4.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"1315 DequantizeLinear_h.4.mlp.c_fc.weight_1" -> "1316 Gemm_1051" [label="[768, 3072]", style=solid]; +"1316 Gemm_1051" -> "1322 Reshape_1055" [label="[]", style=solid]; +"1317 Constant_nncf_1217" -> "1318 Unsqueeze_1052" [label="[1]", style=dashed]; +"1318 Unsqueeze_1052" -> "1321 Concat_1054" [label="[1]", style=dashed]; +"1319 Constant_nncf_1219" -> "1320 Unsqueeze_1053" [label="[1]", style=dashed]; +"1320 Unsqueeze_1053" -> "1321 Concat_1054" [label="[1]", style=dashed]; +"1321 Concat_1054" -> "1322 Reshape_1055" [label="[3]", style=dashed]; +"1322 Reshape_1055" -> "1324 Mul_1057" [label="[]", style=solid]; +"1322 Reshape_1055" -> "1326 Pow_1059" [label="[]", style=solid]; +"1322 Reshape_1055" -> "1329 Add_1062" [label="[]", style=solid]; +"1323 Constant_1056" -> "1324 Mul_1057" [label="[]", style=solid]; +"1324 Mul_1057" -> "1335 Mul_1068" [label="[]", style=solid]; +"1325 Constant_1058" -> "1326 Pow_1059" [label="[]", style=solid]; +"1326 Pow_1059" -> "1328 Mul_1061" [label="[]", style=solid]; +"1327 Constant_1060" -> "1328 Mul_1061" [label="[]", style=solid]; +"1328 Mul_1061" -> "1329 Add_1062" [label="[]", style=solid]; +"1329 Add_1062" -> "1331 Mul_1064" [label="[]", style=solid]; +"1330 Constant_1063" -> "1331 Mul_1064" [label="[]", style=solid]; +"1331 Mul_1064" -> "1332 Tanh_1065" [label="[]", style=solid]; +"1332 Tanh_1065" -> "1334 Add_1067" [label="[]", style=solid]; +"1333 Constant_1066" -> "1334 Add_1067" [label="[]", style=solid]; +"1334 Add_1067" -> "1335 Mul_1068" [label="[]", style=solid]; +"1335 Mul_1068" -> "1336 QuantizeLinear_1383_1" [label="[]", style=solid]; +"1336 QuantizeLinear_1383_1" -> "1337 DequantizeLinear_1383_1" [label="[]", style=dashed]; +"1337 DequantizeLinear_1383_1" -> "1338 Shape_1069" [label="[]", style=solid]; +"1337 DequantizeLinear_1383_1" -> "1341 Shape_1072" [label="[]", style=solid]; +"1337 
DequantizeLinear_1383_1" -> "1344 Shape_1075" [label="[]", style=solid]; +"1337 DequantizeLinear_1383_1" -> "1350 Reshape_1080" [label="[]", style=solid]; +"1338 Shape_1069" -> "1340 Gather_1071" [label="[-1]", style=dashed]; +"1339 Constant_1070" -> "1340 Gather_1071" [label="[]", style=dashed]; +"1340 Gather_1071" -> "1355 Unsqueeze_1082" [label="[]", style=dashed]; +"1341 Shape_1072" -> "1343 Gather_1074" [label="[-1]", style=dashed]; +"1342 Constant_1073" -> "1343 Gather_1074" [label="[]", style=dashed]; +"1343 Gather_1074" -> "1357 Unsqueeze_1083" [label="[]", style=dashed]; +"1344 Shape_1075" -> "1346 Gather_1077" [label="[-1]", style=dashed]; +"1345 Constant_1076" -> "1346 Gather_1077" [label="[]", style=dashed]; +"1346 Gather_1077" -> "1348 Unsqueeze_1078" [label="[]", style=dashed]; +"1347 Constant_nncf_1245" -> "1348 Unsqueeze_1078" [label="[1]", style=dashed]; +"1348 Unsqueeze_1078" -> "1349 Concat_1079" [label="[1]", style=dashed]; +"1349 Concat_1079" -> "1350 Reshape_1080" [label="[2]", style=dashed]; +"1350 Reshape_1080" -> "1353 Gemm_1081" [label="[]", style=solid]; +"1351 QuantizeLinear_h.4.mlp.c_proj.weight_1" -> "1352 DequantizeLinear_h.4.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"1352 DequantizeLinear_h.4.mlp.c_proj.weight_1" -> "1353 Gemm_1081" [label="[3072, 768]", style=solid]; +"1353 Gemm_1081" -> "1359 Reshape_1085" [label="[]", style=solid]; +"1354 Constant_nncf_1250" -> "1355 Unsqueeze_1082" [label="[1]", style=dashed]; +"1355 Unsqueeze_1082" -> "1358 Concat_1084" [label="[1]", style=dashed]; +"1356 Constant_nncf_1252" -> "1357 Unsqueeze_1083" [label="[1]", style=dashed]; +"1357 Unsqueeze_1083" -> "1358 Concat_1084" [label="[1]", style=dashed]; +"1358 Concat_1084" -> "1359 Reshape_1085" [label="[3]", style=dashed]; +"1359 Reshape_1085" -> "1360 Add_1086" [label="[]", style=solid]; +"1360 Add_1086" -> "1361 ReduceMean_1087" [label="[]", style=solid]; +"1360 Add_1086" -> "1362 Sub_1088" [label="[]", style=solid]; +"1360 Add_1086" -> "1551 Add_1238" [label="[]", style=solid]; +"1361 ReduceMean_1087" -> "1362 Sub_1088" [label="[]", style=solid]; +"1362 Sub_1088" -> "1364 Pow_1090" [label="[]", style=solid]; +"1362 Sub_1088" -> "1369 Div_1095" [label="[]", style=solid]; +"1363 Constant_1089" -> "1364 Pow_1090" [label="[]", style=solid]; +"1364 Pow_1090" -> "1365 ReduceMean_1091" [label="[]", style=solid]; +"1365 ReduceMean_1091" -> "1367 Add_1093" [label="[]", style=solid]; +"1366 Constant_1092" -> "1367 Add_1093" [label="[]", style=solid]; +"1367 Add_1093" -> "1368 Sqrt_1094" [label="[]", style=solid]; +"1368 Sqrt_1094" -> "1369 Div_1095" [label="[]", style=solid]; +"1369 Div_1095" -> "1370 Mul_1096" [label="[]", style=solid]; +"1370 Mul_1096" -> "1371 Add_1097" [label="[]", style=solid]; +"1371 Add_1097" -> "1372 QuantizeLinear_1416_1" [label="[]", style=solid]; +"1372 QuantizeLinear_1416_1" -> "1373 DequantizeLinear_1416_1" [label="[]", style=dashed]; +"1373 DequantizeLinear_1416_1" -> "1374 Shape_1098" [label="[]", style=solid]; +"1373 DequantizeLinear_1416_1" -> "1377 Shape_1101" [label="[]", style=solid]; +"1373 DequantizeLinear_1416_1" -> "1380 Shape_1104" [label="[]", style=solid]; +"1373 DequantizeLinear_1416_1" -> "1386 Reshape_1109" [label="[]", style=solid]; +"1374 Shape_1098" -> "1376 Gather_1100" [label="[-1]", style=dashed]; +"1375 Constant_1099" -> "1376 Gather_1100" [label="[]", style=dashed]; +"1376 Gather_1100" -> "1391 Unsqueeze_1111" [label="[]", style=dashed]; +"1377 Shape_1101" -> "1379 Gather_1103" [label="[-1]", style=dashed]; 
+"1378 Constant_1102" -> "1379 Gather_1103" [label="[]", style=dashed]; +"1379 Gather_1103" -> "1393 Unsqueeze_1112" [label="[]", style=dashed]; +"1380 Shape_1104" -> "1382 Gather_1106" [label="[-1]", style=dashed]; +"1381 Constant_1105" -> "1382 Gather_1106" [label="[]", style=dashed]; +"1382 Gather_1106" -> "1384 Unsqueeze_1107" [label="[]", style=dashed]; +"1383 Constant_nncf_1277" -> "1384 Unsqueeze_1107" [label="[1]", style=dashed]; +"1384 Unsqueeze_1107" -> "1385 Concat_1108" [label="[1]", style=dashed]; +"1385 Concat_1108" -> "1386 Reshape_1109" [label="[2]", style=dashed]; +"1386 Reshape_1109" -> "1389 Gemm_1110" [label="[]", style=solid]; +"1387 QuantizeLinear_h.5.attn.c_attn.weight_1" -> "1388 DequantizeLinear_h.5.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"1388 DequantizeLinear_h.5.attn.c_attn.weight_1" -> "1389 Gemm_1110" [label="[768, 2304]", style=solid]; +"1389 Gemm_1110" -> "1395 Reshape_1114" [label="[]", style=solid]; +"1390 Constant_nncf_1282" -> "1391 Unsqueeze_1111" [label="[1]", style=dashed]; +"1391 Unsqueeze_1111" -> "1394 Concat_1113" [label="[1]", style=dashed]; +"1392 Constant_nncf_1284" -> "1393 Unsqueeze_1112" [label="[1]", style=dashed]; +"1393 Unsqueeze_1112" -> "1394 Concat_1113" [label="[1]", style=dashed]; +"1394 Concat_1113" -> "1395 Reshape_1114" [label="[3]", style=dashed]; +"1395 Reshape_1114" -> "1397 Split_1115" [label="[]", style=solid]; +"1396 Constant_nncf_1288" -> "1397 Split_1115" [label="[3]", style=dashed]; +"1397 Split_1115" -> "1398 QuantizeLinear_query.11_1" [label="[]", style=solid]; +"1397 Split_1115" -> "1400 Shape_1116" [label="[]", style=solid]; +"1397 Split_1115" -> "1403 Shape_1119" [label="[]", style=solid]; +"1397 Split_1115" -> "1406 Shape_1122" [label="[]", style=solid]; +"1397 Split_1115" -> "1422 Shape_1135" [label="[]", style=solid]; +"1397 Split_1115" -> "1425 Shape_1138" [label="[]", style=solid]; +"1397 Split_1115" -> "1428 Shape_1141" [label="[]", style=solid]; +"1397 Split_1115" -> "1442 Reshape_1152" [label="[]", style=solid]; +"1397 Split_1115" -> "1446 Shape_1154" [label="[]", style=solid]; +"1397 Split_1115" -> "1449 Shape_1157" [label="[]", style=solid]; +"1397 Split_1115" -> "1452 Shape_1160" [label="[]", style=solid]; +"1397 Split_1115" -> "1466 Reshape_1171" [label="[]", style=solid]; +"1398 QuantizeLinear_query.11_1" -> "1399 DequantizeLinear_query.11_1" [label="[]", style=dashed]; +"1399 DequantizeLinear_query.11_1" -> "1420 Reshape_1133" [label="[]", style=solid]; +"1400 Shape_1116" -> "1402 Gather_1118" [label="[-1]", style=dashed]; +"1401 Constant_1117" -> "1402 Gather_1118" [label="[]", style=dashed]; +"1402 Gather_1118" -> "1414 Unsqueeze_1129" [label="[]", style=dashed]; +"1403 Shape_1119" -> "1405 Gather_1121" [label="[-1]", style=dashed]; +"1404 Constant_1120" -> "1405 Gather_1121" [label="[]", style=dashed]; +"1405 Gather_1121" -> "1416 Unsqueeze_1130" [label="[]", style=dashed]; +"1406 Shape_1122" -> "1408 Gather_1124" [label="[-1]", style=dashed]; +"1407 Constant_1123" -> "1408 Gather_1124" [label="[]", style=dashed]; +"1408 Gather_1124" -> "1410 Div_1126" [label="[]", style=dashed]; +"1409 Constant_1125" -> "1410 Div_1126" [label="[]", style=dashed]; +"1410 Div_1126" -> "1411 Cast_1127" [label="[]", style=dashed]; +"1411 Cast_1127" -> "1412 Cast_1128" [label="[]", style=dashed]; +"1412 Cast_1128" -> "1418 Unsqueeze_1131" [label="[]", style=dashed]; +"1413 Constant_nncf_1303" -> "1414 Unsqueeze_1129" [label="[1]", style=dashed]; +"1414 Unsqueeze_1129" -> "1419 Concat_1132" 
[label="[1]", style=dashed]; +"1415 Constant_nncf_1305" -> "1416 Unsqueeze_1130" [label="[1]", style=dashed]; +"1416 Unsqueeze_1130" -> "1419 Concat_1132" [label="[1]", style=dashed]; +"1417 Constant_nncf_1307" -> "1418 Unsqueeze_1131" [label="[1]", style=dashed]; +"1418 Unsqueeze_1131" -> "1419 Concat_1132" [label="[1]", style=dashed]; +"1419 Concat_1132" -> "1420 Reshape_1133" [label="[4]", style=dashed]; +"1420 Reshape_1133" -> "1421 Transpose_1134" [label="[]", style=solid]; +"1421 Transpose_1134" -> "1474 MatMul_1177" [label="[]", style=solid]; +"1422 Shape_1135" -> "1424 Gather_1137" [label="[-1]", style=dashed]; +"1423 Constant_1136" -> "1424 Gather_1137" [label="[]", style=dashed]; +"1424 Gather_1137" -> "1436 Unsqueeze_1148" [label="[]", style=dashed]; +"1425 Shape_1138" -> "1427 Gather_1140" [label="[-1]", style=dashed]; +"1426 Constant_1139" -> "1427 Gather_1140" [label="[]", style=dashed]; +"1427 Gather_1140" -> "1438 Unsqueeze_1149" [label="[]", style=dashed]; +"1428 Shape_1141" -> "1430 Gather_1143" [label="[-1]", style=dashed]; +"1429 Constant_1142" -> "1430 Gather_1143" [label="[]", style=dashed]; +"1430 Gather_1143" -> "1432 Div_1145" [label="[]", style=dashed]; +"1431 Constant_1144" -> "1432 Div_1145" [label="[]", style=dashed]; +"1432 Div_1145" -> "1433 Cast_1146" [label="[]", style=dashed]; +"1433 Cast_1146" -> "1434 Cast_1147" [label="[]", style=dashed]; +"1434 Cast_1147" -> "1440 Unsqueeze_1150" [label="[]", style=dashed]; +"1435 Constant_nncf_1325" -> "1436 Unsqueeze_1148" [label="[1]", style=dashed]; +"1436 Unsqueeze_1148" -> "1441 Concat_1151" [label="[1]", style=dashed]; +"1437 Constant_nncf_1327" -> "1438 Unsqueeze_1149" [label="[1]", style=dashed]; +"1438 Unsqueeze_1149" -> "1441 Concat_1151" [label="[1]", style=dashed]; +"1439 Constant_nncf_1329" -> "1440 Unsqueeze_1150" [label="[1]", style=dashed]; +"1440 Unsqueeze_1150" -> "1441 Concat_1151" [label="[1]", style=dashed]; +"1441 Concat_1151" -> "1442 Reshape_1152" [label="[4]", style=dashed]; +"1442 Reshape_1152" -> "1443 QuantizeLinear_1481_1" [label="[]", style=solid]; +"1442 Reshape_1152" -> "1468 Transpose_1173" [label="[]", style=solid]; +"1443 QuantizeLinear_1481_1" -> "1444 DequantizeLinear_1481_1" [label="[]", style=dashed]; +"1444 DequantizeLinear_1481_1" -> "1445 Transpose_1153" [label="[]", style=solid]; +"1445 Transpose_1153" -> "1474 MatMul_1177" [label="[]", style=solid]; +"1446 Shape_1154" -> "1448 Gather_1156" [label="[-1]", style=dashed]; +"1447 Constant_1155" -> "1448 Gather_1156" [label="[]", style=dashed]; +"1448 Gather_1156" -> "1460 Unsqueeze_1167" [label="[]", style=dashed]; +"1449 Shape_1157" -> "1451 Gather_1159" [label="[-1]", style=dashed]; +"1450 Constant_1158" -> "1451 Gather_1159" [label="[]", style=dashed]; +"1451 Gather_1159" -> "1462 Unsqueeze_1168" [label="[]", style=dashed]; +"1452 Shape_1160" -> "1454 Gather_1162" [label="[-1]", style=dashed]; +"1453 Constant_1161" -> "1454 Gather_1162" [label="[]", style=dashed]; +"1454 Gather_1162" -> "1456 Div_1164" [label="[]", style=dashed]; +"1455 Constant_1163" -> "1456 Div_1164" [label="[]", style=dashed]; +"1456 Div_1164" -> "1457 Cast_1165" [label="[]", style=dashed]; +"1457 Cast_1165" -> "1458 Cast_1166" [label="[]", style=dashed]; +"1458 Cast_1166" -> "1464 Unsqueeze_1169" [label="[]", style=dashed]; +"1459 Constant_nncf_1347" -> "1460 Unsqueeze_1167" [label="[1]", style=dashed]; +"1460 Unsqueeze_1167" -> "1465 Concat_1170" [label="[1]", style=dashed]; +"1461 Constant_nncf_1349" -> "1462 Unsqueeze_1168" [label="[1]", 
style=dashed]; +"1462 Unsqueeze_1168" -> "1465 Concat_1170" [label="[1]", style=dashed]; +"1463 Constant_nncf_1351" -> "1464 Unsqueeze_1169" [label="[1]", style=dashed]; +"1464 Unsqueeze_1169" -> "1465 Concat_1170" [label="[1]", style=dashed]; +"1465 Concat_1170" -> "1466 Reshape_1171" [label="[4]", style=dashed]; +"1466 Reshape_1171" -> "1467 Transpose_1172" [label="[]", style=solid]; +"1467 Transpose_1172" -> "1472 Unsqueeze_1175" [label="[]", style=solid]; +"1467 Transpose_1172" -> "1504 MatMul_1201" [label="[]", style=solid]; +"1468 Transpose_1173" -> "1470 Unsqueeze_1174" [label="[]", style=solid]; +"1469 Constant_nncf_1357" -> "1470 Unsqueeze_1174" [label="[1]", style=dashed]; +"1470 Unsqueeze_1174" -> "1473 Concat_1176" [label="[]", style=solid]; +"1471 Constant_nncf_1359" -> "1472 Unsqueeze_1175" [label="[1]", style=dashed]; +"1472 Unsqueeze_1175" -> "1473 Concat_1176" [label="[]", style=solid]; +"1473 Concat_1176" -> "3237 nncf_model_output_6" [label="[2, 1, 12, 8, 64]", style=solid]; +"1474 MatMul_1177" -> "1476 Div_1179" [label="[]", style=solid]; +"1475 Constant_1178" -> "1476 Div_1179" [label="[]", style=solid]; +"1476 Div_1179" -> "1477 Shape_1180" [label="[]", style=solid]; +"1476 Div_1179" -> "1480 Shape_1183" [label="[]", style=solid]; +"1476 Div_1179" -> "1494 Mul_1194" [label="[]", style=solid]; +"1477 Shape_1180" -> "1479 Gather_1182" [label="[-1]", style=dashed]; +"1478 Constant_1181" -> "1479 Gather_1182" [label="[]", style=dashed]; +"1479 Gather_1182" -> "1483 Sub_1186" [label="[]", style=dashed]; +"1480 Shape_1183" -> "1482 Gather_1185" [label="[-1]", style=dashed]; +"1481 Constant_1184" -> "1482 Gather_1185" [label="[]", style=dashed]; +"1482 Gather_1185" -> "1483 Sub_1186" [label="[]", style=dashed]; +"1482 Gather_1185" -> "1487 Unsqueeze_1188" [label="[]", style=dashed]; +"1482 Gather_1185" -> "1491 Unsqueeze_1191" [label="[]", style=dashed]; +"1483 Sub_1186" -> "1485 Unsqueeze_1187" [label="[]", style=dashed]; +"1484 Constant_nncf_1372" -> "1485 Unsqueeze_1187" [label="[1]", style=dashed]; +"1485 Unsqueeze_1187" -> "1489 Slice_1190" [label="[1]", style=dashed]; +"1486 Constant_nncf_1374" -> "1487 Unsqueeze_1188" [label="[1]", style=dashed]; +"1487 Unsqueeze_1188" -> "1489 Slice_1190" [label="[1]", style=dashed]; +"1488 Constant_1189" -> "1489 Slice_1190" [label="[1]", style=dashed]; +"1489 Slice_1190" -> "1493 Slice_1193" [label="[-1, -1, -1, -1]", style=solid]; +"1490 Constant_nncf_1378" -> "1491 Unsqueeze_1191" [label="[1]", style=dashed]; +"1491 Unsqueeze_1191" -> "1493 Slice_1193" [label="[1]", style=dashed]; +"1492 Constant_1192" -> "1493 Slice_1193" [label="[1]", style=dashed]; +"1493 Slice_1193" -> "1494 Mul_1194" [label="[-1, -1, -1, -1]", style=solid]; +"1493 Slice_1193" -> "1496 Sub_1196" [label="[-1, -1, -1, -1]", style=solid]; +"1494 Mul_1194" -> "1499 Sub_1199" [label="[]", style=solid]; +"1495 Constant_1195" -> "1496 Sub_1196" [label="[]", style=solid]; +"1496 Sub_1196" -> "1498 Mul_1198" [label="[-1, -1, -1, -1]", style=solid]; +"1497 Constant_1197" -> "1498 Mul_1198" [label="[]", style=solid]; +"1498 Mul_1198" -> "1499 Sub_1199" [label="[-1, -1, -1, -1]", style=solid]; +"1499 Sub_1199" -> "1500 Shape_nncf_1388" [label="[]", style=solid]; +"1499 Sub_1199" -> "1501 Flatten_nncf_1389" [label="[]", style=solid]; +"1500 Shape_nncf_1388" -> "1503 Reshape_nncf_1391" [label="[-1]", style=dashed]; +"1501 Flatten_nncf_1389" -> "1502 Softmax_1200" [label="[]", style=solid]; +"1502 Softmax_1200" -> "1503 Reshape_nncf_1391" [label="[]", style=solid]; +"1503 
Reshape_nncf_1391" -> "1504 MatMul_1201" [label="[]", style=solid]; +"1504 MatMul_1201" -> "1505 QuantizeLinear_1538_1" [label="[]", style=solid]; +"1505 QuantizeLinear_1538_1" -> "1506 DequantizeLinear_1538_1" [label="[]", style=dashed]; +"1506 DequantizeLinear_1538_1" -> "1507 Transpose_1202" [label="[]", style=solid]; +"1507 Transpose_1202" -> "1508 Shape_1203" [label="[]", style=solid]; +"1507 Transpose_1202" -> "1511 Shape_1206" [label="[]", style=solid]; +"1507 Transpose_1202" -> "1514 Shape_1209" [label="[]", style=solid]; +"1507 Transpose_1202" -> "1517 Shape_1212" [label="[]", style=solid]; +"1507 Transpose_1202" -> "1528 Reshape_1220" [label="[]", style=solid]; +"1508 Shape_1203" -> "1510 Gather_1205" [label="[-1]", style=dashed]; +"1509 Constant_1204" -> "1510 Gather_1205" [label="[]", style=dashed]; +"1510 Gather_1205" -> "1522 Unsqueeze_1216" [label="[]", style=dashed]; +"1511 Shape_1206" -> "1513 Gather_1208" [label="[-1]", style=dashed]; +"1512 Constant_1207" -> "1513 Gather_1208" [label="[]", style=dashed]; +"1513 Gather_1208" -> "1524 Unsqueeze_1217" [label="[]", style=dashed]; +"1514 Shape_1209" -> "1516 Gather_1211" [label="[-1]", style=dashed]; +"1515 Constant_1210" -> "1516 Gather_1211" [label="[]", style=dashed]; +"1516 Gather_1211" -> "1520 Mul_1215" [label="[]", style=dashed]; +"1517 Shape_1212" -> "1519 Gather_1214" [label="[-1]", style=dashed]; +"1518 Constant_1213" -> "1519 Gather_1214" [label="[]", style=dashed]; +"1519 Gather_1214" -> "1520 Mul_1215" [label="[]", style=dashed]; +"1520 Mul_1215" -> "1526 Unsqueeze_1218" [label="[]", style=dashed]; +"1521 Constant_nncf_1407" -> "1522 Unsqueeze_1216" [label="[1]", style=dashed]; +"1522 Unsqueeze_1216" -> "1527 Concat_1219" [label="[1]", style=dashed]; +"1523 Constant_nncf_1409" -> "1524 Unsqueeze_1217" [label="[1]", style=dashed]; +"1524 Unsqueeze_1217" -> "1527 Concat_1219" [label="[1]", style=dashed]; +"1525 Constant_nncf_1411" -> "1526 Unsqueeze_1218" [label="[1]", style=dashed]; +"1526 Unsqueeze_1218" -> "1527 Concat_1219" [label="[1]", style=dashed]; +"1527 Concat_1219" -> "1528 Reshape_1220" [label="[3]", style=dashed]; +"1528 Reshape_1220" -> "1529 Shape_1221" [label="[]", style=solid]; +"1528 Reshape_1220" -> "1532 Shape_1224" [label="[]", style=solid]; +"1528 Reshape_1220" -> "1535 Shape_1227" [label="[]", style=solid]; +"1528 Reshape_1220" -> "1541 Reshape_1232" [label="[]", style=solid]; +"1529 Shape_1221" -> "1531 Gather_1223" [label="[-1]", style=dashed]; +"1530 Constant_1222" -> "1531 Gather_1223" [label="[]", style=dashed]; +"1531 Gather_1223" -> "1546 Unsqueeze_1234" [label="[]", style=dashed]; +"1532 Shape_1224" -> "1534 Gather_1226" [label="[-1]", style=dashed]; +"1533 Constant_1225" -> "1534 Gather_1226" [label="[]", style=dashed]; +"1534 Gather_1226" -> "1548 Unsqueeze_1235" [label="[]", style=dashed]; +"1535 Shape_1227" -> "1537 Gather_1229" [label="[-1]", style=dashed]; +"1536 Constant_1228" -> "1537 Gather_1229" [label="[]", style=dashed]; +"1537 Gather_1229" -> "1539 Unsqueeze_1230" [label="[]", style=dashed]; +"1538 Constant_nncf_1424" -> "1539 Unsqueeze_1230" [label="[1]", style=dashed]; +"1539 Unsqueeze_1230" -> "1540 Concat_1231" [label="[1]", style=dashed]; +"1540 Concat_1231" -> "1541 Reshape_1232" [label="[2]", style=dashed]; +"1541 Reshape_1232" -> "1544 Gemm_1233" [label="[]", style=solid]; +"1542 QuantizeLinear_h.5.attn.c_proj.weight_1" -> "1543 DequantizeLinear_h.5.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"1543 DequantizeLinear_h.5.attn.c_proj.weight_1" -> "1544 
Gemm_1233" [label="[768, 768]", style=solid]; +"1544 Gemm_1233" -> "1550 Reshape_1237" [label="[]", style=solid]; +"1545 Constant_nncf_1429" -> "1546 Unsqueeze_1234" [label="[1]", style=dashed]; +"1546 Unsqueeze_1234" -> "1549 Concat_1236" [label="[1]", style=dashed]; +"1547 Constant_nncf_1431" -> "1548 Unsqueeze_1235" [label="[1]", style=dashed]; +"1548 Unsqueeze_1235" -> "1549 Concat_1236" [label="[1]", style=dashed]; +"1549 Concat_1236" -> "1550 Reshape_1237" [label="[3]", style=dashed]; +"1550 Reshape_1237" -> "1551 Add_1238" [label="[]", style=solid]; +"1551 Add_1238" -> "1552 ReduceMean_1239" [label="[]", style=solid]; +"1551 Add_1238" -> "1553 Sub_1240" [label="[]", style=solid]; +"1551 Add_1238" -> "1624 Add_1297" [label="[]", style=solid]; +"1552 ReduceMean_1239" -> "1553 Sub_1240" [label="[]", style=solid]; +"1553 Sub_1240" -> "1555 Pow_1242" [label="[]", style=solid]; +"1553 Sub_1240" -> "1560 Div_1247" [label="[]", style=solid]; +"1554 Constant_1241" -> "1555 Pow_1242" [label="[]", style=solid]; +"1555 Pow_1242" -> "1556 ReduceMean_1243" [label="[]", style=solid]; +"1556 ReduceMean_1243" -> "1558 Add_1245" [label="[]", style=solid]; +"1557 Constant_1244" -> "1558 Add_1245" [label="[]", style=solid]; +"1558 Add_1245" -> "1559 Sqrt_1246" [label="[]", style=solid]; +"1559 Sqrt_1246" -> "1560 Div_1247" [label="[]", style=solid]; +"1560 Div_1247" -> "1561 Mul_1248" [label="[]", style=solid]; +"1561 Mul_1248" -> "1562 Add_1249" [label="[]", style=solid]; +"1562 Add_1249" -> "1563 QuantizeLinear_1590_1" [label="[]", style=solid]; +"1563 QuantizeLinear_1590_1" -> "1564 DequantizeLinear_1590_1" [label="[]", style=dashed]; +"1564 DequantizeLinear_1590_1" -> "1565 Shape_1250" [label="[]", style=solid]; +"1564 DequantizeLinear_1590_1" -> "1568 Shape_1253" [label="[]", style=solid]; +"1564 DequantizeLinear_1590_1" -> "1571 Shape_1256" [label="[]", style=solid]; +"1564 DequantizeLinear_1590_1" -> "1577 Reshape_1261" [label="[]", style=solid]; +"1565 Shape_1250" -> "1567 Gather_1252" [label="[-1]", style=dashed]; +"1566 Constant_1251" -> "1567 Gather_1252" [label="[]", style=dashed]; +"1567 Gather_1252" -> "1582 Unsqueeze_1263" [label="[]", style=dashed]; +"1568 Shape_1253" -> "1570 Gather_1255" [label="[-1]", style=dashed]; +"1569 Constant_1254" -> "1570 Gather_1255" [label="[]", style=dashed]; +"1570 Gather_1255" -> "1584 Unsqueeze_1264" [label="[]", style=dashed]; +"1571 Shape_1256" -> "1573 Gather_1258" [label="[-1]", style=dashed]; +"1572 Constant_1257" -> "1573 Gather_1258" [label="[]", style=dashed]; +"1573 Gather_1258" -> "1575 Unsqueeze_1259" [label="[]", style=dashed]; +"1574 Constant_nncf_1456" -> "1575 Unsqueeze_1259" [label="[1]", style=dashed]; +"1575 Unsqueeze_1259" -> "1576 Concat_1260" [label="[1]", style=dashed]; +"1576 Concat_1260" -> "1577 Reshape_1261" [label="[2]", style=dashed]; +"1577 Reshape_1261" -> "1580 Gemm_1262" [label="[]", style=solid]; +"1578 QuantizeLinear_h.5.mlp.c_fc.weight_1" -> "1579 DequantizeLinear_h.5.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"1579 DequantizeLinear_h.5.mlp.c_fc.weight_1" -> "1580 Gemm_1262" [label="[768, 3072]", style=solid]; +"1580 Gemm_1262" -> "1586 Reshape_1266" [label="[]", style=solid]; +"1581 Constant_nncf_1461" -> "1582 Unsqueeze_1263" [label="[1]", style=dashed]; +"1582 Unsqueeze_1263" -> "1585 Concat_1265" [label="[1]", style=dashed]; +"1583 Constant_nncf_1463" -> "1584 Unsqueeze_1264" [label="[1]", style=dashed]; +"1584 Unsqueeze_1264" -> "1585 Concat_1265" [label="[1]", style=dashed]; +"1585 Concat_1265" -> 
"1586 Reshape_1266" [label="[3]", style=dashed]; +"1586 Reshape_1266" -> "1588 Mul_1268" [label="[]", style=solid]; +"1586 Reshape_1266" -> "1590 Pow_1270" [label="[]", style=solid]; +"1586 Reshape_1266" -> "1593 Add_1273" [label="[]", style=solid]; +"1587 Constant_1267" -> "1588 Mul_1268" [label="[]", style=solid]; +"1588 Mul_1268" -> "1599 Mul_1279" [label="[]", style=solid]; +"1589 Constant_1269" -> "1590 Pow_1270" [label="[]", style=solid]; +"1590 Pow_1270" -> "1592 Mul_1272" [label="[]", style=solid]; +"1591 Constant_1271" -> "1592 Mul_1272" [label="[]", style=solid]; +"1592 Mul_1272" -> "1593 Add_1273" [label="[]", style=solid]; +"1593 Add_1273" -> "1595 Mul_1275" [label="[]", style=solid]; +"1594 Constant_1274" -> "1595 Mul_1275" [label="[]", style=solid]; +"1595 Mul_1275" -> "1596 Tanh_1276" [label="[]", style=solid]; +"1596 Tanh_1276" -> "1598 Add_1278" [label="[]", style=solid]; +"1597 Constant_1277" -> "1598 Add_1278" [label="[]", style=solid]; +"1598 Add_1278" -> "1599 Mul_1279" [label="[]", style=solid]; +"1599 Mul_1279" -> "1600 QuantizeLinear_1624_1" [label="[]", style=solid]; +"1600 QuantizeLinear_1624_1" -> "1601 DequantizeLinear_1624_1" [label="[]", style=dashed]; +"1601 DequantizeLinear_1624_1" -> "1602 Shape_1280" [label="[]", style=solid]; +"1601 DequantizeLinear_1624_1" -> "1605 Shape_1283" [label="[]", style=solid]; +"1601 DequantizeLinear_1624_1" -> "1608 Shape_1286" [label="[]", style=solid]; +"1601 DequantizeLinear_1624_1" -> "1614 Reshape_1291" [label="[]", style=solid]; +"1602 Shape_1280" -> "1604 Gather_1282" [label="[-1]", style=dashed]; +"1603 Constant_1281" -> "1604 Gather_1282" [label="[]", style=dashed]; +"1604 Gather_1282" -> "1619 Unsqueeze_1293" [label="[]", style=dashed]; +"1605 Shape_1283" -> "1607 Gather_1285" [label="[-1]", style=dashed]; +"1606 Constant_1284" -> "1607 Gather_1285" [label="[]", style=dashed]; +"1607 Gather_1285" -> "1621 Unsqueeze_1294" [label="[]", style=dashed]; +"1608 Shape_1286" -> "1610 Gather_1288" [label="[-1]", style=dashed]; +"1609 Constant_1287" -> "1610 Gather_1288" [label="[]", style=dashed]; +"1610 Gather_1288" -> "1612 Unsqueeze_1289" [label="[]", style=dashed]; +"1611 Constant_nncf_1489" -> "1612 Unsqueeze_1289" [label="[1]", style=dashed]; +"1612 Unsqueeze_1289" -> "1613 Concat_1290" [label="[1]", style=dashed]; +"1613 Concat_1290" -> "1614 Reshape_1291" [label="[2]", style=dashed]; +"1614 Reshape_1291" -> "1617 Gemm_1292" [label="[]", style=solid]; +"1615 QuantizeLinear_h.5.mlp.c_proj.weight_1" -> "1616 DequantizeLinear_h.5.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"1616 DequantizeLinear_h.5.mlp.c_proj.weight_1" -> "1617 Gemm_1292" [label="[3072, 768]", style=solid]; +"1617 Gemm_1292" -> "1623 Reshape_1296" [label="[]", style=solid]; +"1618 Constant_nncf_1494" -> "1619 Unsqueeze_1293" [label="[1]", style=dashed]; +"1619 Unsqueeze_1293" -> "1622 Concat_1295" [label="[1]", style=dashed]; +"1620 Constant_nncf_1496" -> "1621 Unsqueeze_1294" [label="[1]", style=dashed]; +"1621 Unsqueeze_1294" -> "1622 Concat_1295" [label="[1]", style=dashed]; +"1622 Concat_1295" -> "1623 Reshape_1296" [label="[3]", style=dashed]; +"1623 Reshape_1296" -> "1624 Add_1297" [label="[]", style=solid]; +"1624 Add_1297" -> "1625 ReduceMean_1298" [label="[]", style=solid]; +"1624 Add_1297" -> "1626 Sub_1299" [label="[]", style=solid]; +"1624 Add_1297" -> "1815 Add_1449" [label="[]", style=solid]; +"1625 ReduceMean_1298" -> "1626 Sub_1299" [label="[]", style=solid]; +"1626 Sub_1299" -> "1628 Pow_1301" [label="[]", style=solid]; 
+"1626 Sub_1299" -> "1633 Div_1306" [label="[]", style=solid]; +"1627 Constant_1300" -> "1628 Pow_1301" [label="[]", style=solid]; +"1628 Pow_1301" -> "1629 ReduceMean_1302" [label="[]", style=solid]; +"1629 ReduceMean_1302" -> "1631 Add_1304" [label="[]", style=solid]; +"1630 Constant_1303" -> "1631 Add_1304" [label="[]", style=solid]; +"1631 Add_1304" -> "1632 Sqrt_1305" [label="[]", style=solid]; +"1632 Sqrt_1305" -> "1633 Div_1306" [label="[]", style=solid]; +"1633 Div_1306" -> "1634 Mul_1307" [label="[]", style=solid]; +"1634 Mul_1307" -> "1635 Add_1308" [label="[]", style=solid]; +"1635 Add_1308" -> "1636 QuantizeLinear_1657_1" [label="[]", style=solid]; +"1636 QuantizeLinear_1657_1" -> "1637 DequantizeLinear_1657_1" [label="[]", style=dashed]; +"1637 DequantizeLinear_1657_1" -> "1638 Shape_1309" [label="[]", style=solid]; +"1637 DequantizeLinear_1657_1" -> "1641 Shape_1312" [label="[]", style=solid]; +"1637 DequantizeLinear_1657_1" -> "1644 Shape_1315" [label="[]", style=solid]; +"1637 DequantizeLinear_1657_1" -> "1650 Reshape_1320" [label="[]", style=solid]; +"1638 Shape_1309" -> "1640 Gather_1311" [label="[-1]", style=dashed]; +"1639 Constant_1310" -> "1640 Gather_1311" [label="[]", style=dashed]; +"1640 Gather_1311" -> "1655 Unsqueeze_1322" [label="[]", style=dashed]; +"1641 Shape_1312" -> "1643 Gather_1314" [label="[-1]", style=dashed]; +"1642 Constant_1313" -> "1643 Gather_1314" [label="[]", style=dashed]; +"1643 Gather_1314" -> "1657 Unsqueeze_1323" [label="[]", style=dashed]; +"1644 Shape_1315" -> "1646 Gather_1317" [label="[-1]", style=dashed]; +"1645 Constant_1316" -> "1646 Gather_1317" [label="[]", style=dashed]; +"1646 Gather_1317" -> "1648 Unsqueeze_1318" [label="[]", style=dashed]; +"1647 Constant_nncf_1521" -> "1648 Unsqueeze_1318" [label="[1]", style=dashed]; +"1648 Unsqueeze_1318" -> "1649 Concat_1319" [label="[1]", style=dashed]; +"1649 Concat_1319" -> "1650 Reshape_1320" [label="[2]", style=dashed]; +"1650 Reshape_1320" -> "1653 Gemm_1321" [label="[]", style=solid]; +"1651 QuantizeLinear_h.6.attn.c_attn.weight_1" -> "1652 DequantizeLinear_h.6.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"1652 DequantizeLinear_h.6.attn.c_attn.weight_1" -> "1653 Gemm_1321" [label="[768, 2304]", style=solid]; +"1653 Gemm_1321" -> "1659 Reshape_1325" [label="[]", style=solid]; +"1654 Constant_nncf_1526" -> "1655 Unsqueeze_1322" [label="[1]", style=dashed]; +"1655 Unsqueeze_1322" -> "1658 Concat_1324" [label="[1]", style=dashed]; +"1656 Constant_nncf_1528" -> "1657 Unsqueeze_1323" [label="[1]", style=dashed]; +"1657 Unsqueeze_1323" -> "1658 Concat_1324" [label="[1]", style=dashed]; +"1658 Concat_1324" -> "1659 Reshape_1325" [label="[3]", style=dashed]; +"1659 Reshape_1325" -> "1661 Split_1326" [label="[]", style=solid]; +"1660 Constant_nncf_1532" -> "1661 Split_1326" [label="[3]", style=dashed]; +"1661 Split_1326" -> "1662 QuantizeLinear_query.13_1" [label="[]", style=solid]; +"1661 Split_1326" -> "1664 Shape_1327" [label="[]", style=solid]; +"1661 Split_1326" -> "1667 Shape_1330" [label="[]", style=solid]; +"1661 Split_1326" -> "1670 Shape_1333" [label="[]", style=solid]; +"1661 Split_1326" -> "1686 Shape_1346" [label="[]", style=solid]; +"1661 Split_1326" -> "1689 Shape_1349" [label="[]", style=solid]; +"1661 Split_1326" -> "1692 Shape_1352" [label="[]", style=solid]; +"1661 Split_1326" -> "1706 Reshape_1363" [label="[]", style=solid]; +"1661 Split_1326" -> "1710 Shape_1365" [label="[]", style=solid]; +"1661 Split_1326" -> "1713 Shape_1368" [label="[]", style=solid]; 
+"1661 Split_1326" -> "1716 Shape_1371" [label="[]", style=solid]; +"1661 Split_1326" -> "1730 Reshape_1382" [label="[]", style=solid]; +"1662 QuantizeLinear_query.13_1" -> "1663 DequantizeLinear_query.13_1" [label="[]", style=dashed]; +"1663 DequantizeLinear_query.13_1" -> "1684 Reshape_1344" [label="[]", style=solid]; +"1664 Shape_1327" -> "1666 Gather_1329" [label="[-1]", style=dashed]; +"1665 Constant_1328" -> "1666 Gather_1329" [label="[]", style=dashed]; +"1666 Gather_1329" -> "1678 Unsqueeze_1340" [label="[]", style=dashed]; +"1667 Shape_1330" -> "1669 Gather_1332" [label="[-1]", style=dashed]; +"1668 Constant_1331" -> "1669 Gather_1332" [label="[]", style=dashed]; +"1669 Gather_1332" -> "1680 Unsqueeze_1341" [label="[]", style=dashed]; +"1670 Shape_1333" -> "1672 Gather_1335" [label="[-1]", style=dashed]; +"1671 Constant_1334" -> "1672 Gather_1335" [label="[]", style=dashed]; +"1672 Gather_1335" -> "1674 Div_1337" [label="[]", style=dashed]; +"1673 Constant_1336" -> "1674 Div_1337" [label="[]", style=dashed]; +"1674 Div_1337" -> "1675 Cast_1338" [label="[]", style=dashed]; +"1675 Cast_1338" -> "1676 Cast_1339" [label="[]", style=dashed]; +"1676 Cast_1339" -> "1682 Unsqueeze_1342" [label="[]", style=dashed]; +"1677 Constant_nncf_1547" -> "1678 Unsqueeze_1340" [label="[1]", style=dashed]; +"1678 Unsqueeze_1340" -> "1683 Concat_1343" [label="[1]", style=dashed]; +"1679 Constant_nncf_1549" -> "1680 Unsqueeze_1341" [label="[1]", style=dashed]; +"1680 Unsqueeze_1341" -> "1683 Concat_1343" [label="[1]", style=dashed]; +"1681 Constant_nncf_1551" -> "1682 Unsqueeze_1342" [label="[1]", style=dashed]; +"1682 Unsqueeze_1342" -> "1683 Concat_1343" [label="[1]", style=dashed]; +"1683 Concat_1343" -> "1684 Reshape_1344" [label="[4]", style=dashed]; +"1684 Reshape_1344" -> "1685 Transpose_1345" [label="[]", style=solid]; +"1685 Transpose_1345" -> "1738 MatMul_1388" [label="[]", style=solid]; +"1686 Shape_1346" -> "1688 Gather_1348" [label="[-1]", style=dashed]; +"1687 Constant_1347" -> "1688 Gather_1348" [label="[]", style=dashed]; +"1688 Gather_1348" -> "1700 Unsqueeze_1359" [label="[]", style=dashed]; +"1689 Shape_1349" -> "1691 Gather_1351" [label="[-1]", style=dashed]; +"1690 Constant_1350" -> "1691 Gather_1351" [label="[]", style=dashed]; +"1691 Gather_1351" -> "1702 Unsqueeze_1360" [label="[]", style=dashed]; +"1692 Shape_1352" -> "1694 Gather_1354" [label="[-1]", style=dashed]; +"1693 Constant_1353" -> "1694 Gather_1354" [label="[]", style=dashed]; +"1694 Gather_1354" -> "1696 Div_1356" [label="[]", style=dashed]; +"1695 Constant_1355" -> "1696 Div_1356" [label="[]", style=dashed]; +"1696 Div_1356" -> "1697 Cast_1357" [label="[]", style=dashed]; +"1697 Cast_1357" -> "1698 Cast_1358" [label="[]", style=dashed]; +"1698 Cast_1358" -> "1704 Unsqueeze_1361" [label="[]", style=dashed]; +"1699 Constant_nncf_1569" -> "1700 Unsqueeze_1359" [label="[1]", style=dashed]; +"1700 Unsqueeze_1359" -> "1705 Concat_1362" [label="[1]", style=dashed]; +"1701 Constant_nncf_1571" -> "1702 Unsqueeze_1360" [label="[1]", style=dashed]; +"1702 Unsqueeze_1360" -> "1705 Concat_1362" [label="[1]", style=dashed]; +"1703 Constant_nncf_1573" -> "1704 Unsqueeze_1361" [label="[1]", style=dashed]; +"1704 Unsqueeze_1361" -> "1705 Concat_1362" [label="[1]", style=dashed]; +"1705 Concat_1362" -> "1706 Reshape_1363" [label="[4]", style=dashed]; +"1706 Reshape_1363" -> "1707 QuantizeLinear_1722_1" [label="[]", style=solid]; +"1706 Reshape_1363" -> "1732 Transpose_1384" [label="[]", style=solid]; +"1707 QuantizeLinear_1722_1" -> 
"1708 DequantizeLinear_1722_1" [label="[]", style=dashed]; +"1708 DequantizeLinear_1722_1" -> "1709 Transpose_1364" [label="[]", style=solid]; +"1709 Transpose_1364" -> "1738 MatMul_1388" [label="[]", style=solid]; +"1710 Shape_1365" -> "1712 Gather_1367" [label="[-1]", style=dashed]; +"1711 Constant_1366" -> "1712 Gather_1367" [label="[]", style=dashed]; +"1712 Gather_1367" -> "1724 Unsqueeze_1378" [label="[]", style=dashed]; +"1713 Shape_1368" -> "1715 Gather_1370" [label="[-1]", style=dashed]; +"1714 Constant_1369" -> "1715 Gather_1370" [label="[]", style=dashed]; +"1715 Gather_1370" -> "1726 Unsqueeze_1379" [label="[]", style=dashed]; +"1716 Shape_1371" -> "1718 Gather_1373" [label="[-1]", style=dashed]; +"1717 Constant_1372" -> "1718 Gather_1373" [label="[]", style=dashed]; +"1718 Gather_1373" -> "1720 Div_1375" [label="[]", style=dashed]; +"1719 Constant_1374" -> "1720 Div_1375" [label="[]", style=dashed]; +"1720 Div_1375" -> "1721 Cast_1376" [label="[]", style=dashed]; +"1721 Cast_1376" -> "1722 Cast_1377" [label="[]", style=dashed]; +"1722 Cast_1377" -> "1728 Unsqueeze_1380" [label="[]", style=dashed]; +"1723 Constant_nncf_1591" -> "1724 Unsqueeze_1378" [label="[1]", style=dashed]; +"1724 Unsqueeze_1378" -> "1729 Concat_1381" [label="[1]", style=dashed]; +"1725 Constant_nncf_1593" -> "1726 Unsqueeze_1379" [label="[1]", style=dashed]; +"1726 Unsqueeze_1379" -> "1729 Concat_1381" [label="[1]", style=dashed]; +"1727 Constant_nncf_1595" -> "1728 Unsqueeze_1380" [label="[1]", style=dashed]; +"1728 Unsqueeze_1380" -> "1729 Concat_1381" [label="[1]", style=dashed]; +"1729 Concat_1381" -> "1730 Reshape_1382" [label="[4]", style=dashed]; +"1730 Reshape_1382" -> "1731 Transpose_1383" [label="[]", style=solid]; +"1731 Transpose_1383" -> "1736 Unsqueeze_1386" [label="[]", style=solid]; +"1731 Transpose_1383" -> "1768 MatMul_1412" [label="[]", style=solid]; +"1732 Transpose_1384" -> "1734 Unsqueeze_1385" [label="[]", style=solid]; +"1733 Constant_nncf_1601" -> "1734 Unsqueeze_1385" [label="[1]", style=dashed]; +"1734 Unsqueeze_1385" -> "1737 Concat_1387" [label="[]", style=solid]; +"1735 Constant_nncf_1603" -> "1736 Unsqueeze_1386" [label="[1]", style=dashed]; +"1736 Unsqueeze_1386" -> "1737 Concat_1387" [label="[]", style=solid]; +"1737 Concat_1387" -> "3238 nncf_model_output_7" [label="[2, 1, 12, 8, 64]", style=solid]; +"1738 MatMul_1388" -> "1740 Div_1390" [label="[]", style=solid]; +"1739 Constant_1389" -> "1740 Div_1390" [label="[]", style=solid]; +"1740 Div_1390" -> "1741 Shape_1391" [label="[]", style=solid]; +"1740 Div_1390" -> "1744 Shape_1394" [label="[]", style=solid]; +"1740 Div_1390" -> "1758 Mul_1405" [label="[]", style=solid]; +"1741 Shape_1391" -> "1743 Gather_1393" [label="[-1]", style=dashed]; +"1742 Constant_1392" -> "1743 Gather_1393" [label="[]", style=dashed]; +"1743 Gather_1393" -> "1747 Sub_1397" [label="[]", style=dashed]; +"1744 Shape_1394" -> "1746 Gather_1396" [label="[-1]", style=dashed]; +"1745 Constant_1395" -> "1746 Gather_1396" [label="[]", style=dashed]; +"1746 Gather_1396" -> "1747 Sub_1397" [label="[]", style=dashed]; +"1746 Gather_1396" -> "1751 Unsqueeze_1399" [label="[]", style=dashed]; +"1746 Gather_1396" -> "1755 Unsqueeze_1402" [label="[]", style=dashed]; +"1747 Sub_1397" -> "1749 Unsqueeze_1398" [label="[]", style=dashed]; +"1748 Constant_nncf_1616" -> "1749 Unsqueeze_1398" [label="[1]", style=dashed]; +"1749 Unsqueeze_1398" -> "1753 Slice_1401" [label="[1]", style=dashed]; +"1750 Constant_nncf_1618" -> "1751 Unsqueeze_1399" [label="[1]", 
style=dashed]; +"1751 Unsqueeze_1399" -> "1753 Slice_1401" [label="[1]", style=dashed]; +"1752 Constant_1400" -> "1753 Slice_1401" [label="[1]", style=dashed]; +"1753 Slice_1401" -> "1757 Slice_1404" [label="[-1, -1, -1, -1]", style=solid]; +"1754 Constant_nncf_1622" -> "1755 Unsqueeze_1402" [label="[1]", style=dashed]; +"1755 Unsqueeze_1402" -> "1757 Slice_1404" [label="[1]", style=dashed]; +"1756 Constant_1403" -> "1757 Slice_1404" [label="[1]", style=dashed]; +"1757 Slice_1404" -> "1758 Mul_1405" [label="[-1, -1, -1, -1]", style=solid]; +"1757 Slice_1404" -> "1760 Sub_1407" [label="[-1, -1, -1, -1]", style=solid]; +"1758 Mul_1405" -> "1763 Sub_1410" [label="[]", style=solid]; +"1759 Constant_1406" -> "1760 Sub_1407" [label="[]", style=solid]; +"1760 Sub_1407" -> "1762 Mul_1409" [label="[-1, -1, -1, -1]", style=solid]; +"1761 Constant_1408" -> "1762 Mul_1409" [label="[]", style=solid]; +"1762 Mul_1409" -> "1763 Sub_1410" [label="[-1, -1, -1, -1]", style=solid]; +"1763 Sub_1410" -> "1764 Shape_nncf_1632" [label="[]", style=solid]; +"1763 Sub_1410" -> "1765 Flatten_nncf_1633" [label="[]", style=solid]; +"1764 Shape_nncf_1632" -> "1767 Reshape_nncf_1635" [label="[-1]", style=dashed]; +"1765 Flatten_nncf_1633" -> "1766 Softmax_1411" [label="[]", style=solid]; +"1766 Softmax_1411" -> "1767 Reshape_nncf_1635" [label="[]", style=solid]; +"1767 Reshape_nncf_1635" -> "1768 MatMul_1412" [label="[]", style=solid]; +"1768 MatMul_1412" -> "1769 QuantizeLinear_1779_1" [label="[]", style=solid]; +"1769 QuantizeLinear_1779_1" -> "1770 DequantizeLinear_1779_1" [label="[]", style=dashed]; +"1770 DequantizeLinear_1779_1" -> "1771 Transpose_1413" [label="[]", style=solid]; +"1771 Transpose_1413" -> "1772 Shape_1414" [label="[]", style=solid]; +"1771 Transpose_1413" -> "1775 Shape_1417" [label="[]", style=solid]; +"1771 Transpose_1413" -> "1778 Shape_1420" [label="[]", style=solid]; +"1771 Transpose_1413" -> "1781 Shape_1423" [label="[]", style=solid]; +"1771 Transpose_1413" -> "1792 Reshape_1431" [label="[]", style=solid]; +"1772 Shape_1414" -> "1774 Gather_1416" [label="[-1]", style=dashed]; +"1773 Constant_1415" -> "1774 Gather_1416" [label="[]", style=dashed]; +"1774 Gather_1416" -> "1786 Unsqueeze_1427" [label="[]", style=dashed]; +"1775 Shape_1417" -> "1777 Gather_1419" [label="[-1]", style=dashed]; +"1776 Constant_1418" -> "1777 Gather_1419" [label="[]", style=dashed]; +"1777 Gather_1419" -> "1788 Unsqueeze_1428" [label="[]", style=dashed]; +"1778 Shape_1420" -> "1780 Gather_1422" [label="[-1]", style=dashed]; +"1779 Constant_1421" -> "1780 Gather_1422" [label="[]", style=dashed]; +"1780 Gather_1422" -> "1784 Mul_1426" [label="[]", style=dashed]; +"1781 Shape_1423" -> "1783 Gather_1425" [label="[-1]", style=dashed]; +"1782 Constant_1424" -> "1783 Gather_1425" [label="[]", style=dashed]; +"1783 Gather_1425" -> "1784 Mul_1426" [label="[]", style=dashed]; +"1784 Mul_1426" -> "1790 Unsqueeze_1429" [label="[]", style=dashed]; +"1785 Constant_nncf_1651" -> "1786 Unsqueeze_1427" [label="[1]", style=dashed]; +"1786 Unsqueeze_1427" -> "1791 Concat_1430" [label="[1]", style=dashed]; +"1787 Constant_nncf_1653" -> "1788 Unsqueeze_1428" [label="[1]", style=dashed]; +"1788 Unsqueeze_1428" -> "1791 Concat_1430" [label="[1]", style=dashed]; +"1789 Constant_nncf_1655" -> "1790 Unsqueeze_1429" [label="[1]", style=dashed]; +"1790 Unsqueeze_1429" -> "1791 Concat_1430" [label="[1]", style=dashed]; +"1791 Concat_1430" -> "1792 Reshape_1431" [label="[3]", style=dashed]; +"1792 Reshape_1431" -> "1793 Shape_1432" [label="[]", 
style=solid]; +"1792 Reshape_1431" -> "1796 Shape_1435" [label="[]", style=solid]; +"1792 Reshape_1431" -> "1799 Shape_1438" [label="[]", style=solid]; +"1792 Reshape_1431" -> "1805 Reshape_1443" [label="[]", style=solid]; +"1793 Shape_1432" -> "1795 Gather_1434" [label="[-1]", style=dashed]; +"1794 Constant_1433" -> "1795 Gather_1434" [label="[]", style=dashed]; +"1795 Gather_1434" -> "1810 Unsqueeze_1445" [label="[]", style=dashed]; +"1796 Shape_1435" -> "1798 Gather_1437" [label="[-1]", style=dashed]; +"1797 Constant_1436" -> "1798 Gather_1437" [label="[]", style=dashed]; +"1798 Gather_1437" -> "1812 Unsqueeze_1446" [label="[]", style=dashed]; +"1799 Shape_1438" -> "1801 Gather_1440" [label="[-1]", style=dashed]; +"1800 Constant_1439" -> "1801 Gather_1440" [label="[]", style=dashed]; +"1801 Gather_1440" -> "1803 Unsqueeze_1441" [label="[]", style=dashed]; +"1802 Constant_nncf_1668" -> "1803 Unsqueeze_1441" [label="[1]", style=dashed]; +"1803 Unsqueeze_1441" -> "1804 Concat_1442" [label="[1]", style=dashed]; +"1804 Concat_1442" -> "1805 Reshape_1443" [label="[2]", style=dashed]; +"1805 Reshape_1443" -> "1808 Gemm_1444" [label="[]", style=solid]; +"1806 QuantizeLinear_h.6.attn.c_proj.weight_1" -> "1807 DequantizeLinear_h.6.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"1807 DequantizeLinear_h.6.attn.c_proj.weight_1" -> "1808 Gemm_1444" [label="[768, 768]", style=solid]; +"1808 Gemm_1444" -> "1814 Reshape_1448" [label="[]", style=solid]; +"1809 Constant_nncf_1673" -> "1810 Unsqueeze_1445" [label="[1]", style=dashed]; +"1810 Unsqueeze_1445" -> "1813 Concat_1447" [label="[1]", style=dashed]; +"1811 Constant_nncf_1675" -> "1812 Unsqueeze_1446" [label="[1]", style=dashed]; +"1812 Unsqueeze_1446" -> "1813 Concat_1447" [label="[1]", style=dashed]; +"1813 Concat_1447" -> "1814 Reshape_1448" [label="[3]", style=dashed]; +"1814 Reshape_1448" -> "1815 Add_1449" [label="[]", style=solid]; +"1815 Add_1449" -> "1816 ReduceMean_1450" [label="[]", style=solid]; +"1815 Add_1449" -> "1817 Sub_1451" [label="[]", style=solid]; +"1815 Add_1449" -> "1888 Add_1508" [label="[]", style=solid]; +"1816 ReduceMean_1450" -> "1817 Sub_1451" [label="[]", style=solid]; +"1817 Sub_1451" -> "1819 Pow_1453" [label="[]", style=solid]; +"1817 Sub_1451" -> "1824 Div_1458" [label="[]", style=solid]; +"1818 Constant_1452" -> "1819 Pow_1453" [label="[]", style=solid]; +"1819 Pow_1453" -> "1820 ReduceMean_1454" [label="[]", style=solid]; +"1820 ReduceMean_1454" -> "1822 Add_1456" [label="[]", style=solid]; +"1821 Constant_1455" -> "1822 Add_1456" [label="[]", style=solid]; +"1822 Add_1456" -> "1823 Sqrt_1457" [label="[]", style=solid]; +"1823 Sqrt_1457" -> "1824 Div_1458" [label="[]", style=solid]; +"1824 Div_1458" -> "1825 Mul_1459" [label="[]", style=solid]; +"1825 Mul_1459" -> "1826 Add_1460" [label="[]", style=solid]; +"1826 Add_1460" -> "1827 QuantizeLinear_1831_1" [label="[]", style=solid]; +"1827 QuantizeLinear_1831_1" -> "1828 DequantizeLinear_1831_1" [label="[]", style=dashed]; +"1828 DequantizeLinear_1831_1" -> "1829 Shape_1461" [label="[]", style=solid]; +"1828 DequantizeLinear_1831_1" -> "1832 Shape_1464" [label="[]", style=solid]; +"1828 DequantizeLinear_1831_1" -> "1835 Shape_1467" [label="[]", style=solid]; +"1828 DequantizeLinear_1831_1" -> "1841 Reshape_1472" [label="[]", style=solid]; +"1829 Shape_1461" -> "1831 Gather_1463" [label="[-1]", style=dashed]; +"1830 Constant_1462" -> "1831 Gather_1463" [label="[]", style=dashed]; +"1831 Gather_1463" -> "1846 Unsqueeze_1474" [label="[]", style=dashed]; 
+"1832 Shape_1464" -> "1834 Gather_1466" [label="[-1]", style=dashed]; +"1833 Constant_1465" -> "1834 Gather_1466" [label="[]", style=dashed]; +"1834 Gather_1466" -> "1848 Unsqueeze_1475" [label="[]", style=dashed]; +"1835 Shape_1467" -> "1837 Gather_1469" [label="[-1]", style=dashed]; +"1836 Constant_1468" -> "1837 Gather_1469" [label="[]", style=dashed]; +"1837 Gather_1469" -> "1839 Unsqueeze_1470" [label="[]", style=dashed]; +"1838 Constant_nncf_1700" -> "1839 Unsqueeze_1470" [label="[1]", style=dashed]; +"1839 Unsqueeze_1470" -> "1840 Concat_1471" [label="[1]", style=dashed]; +"1840 Concat_1471" -> "1841 Reshape_1472" [label="[2]", style=dashed]; +"1841 Reshape_1472" -> "1844 Gemm_1473" [label="[]", style=solid]; +"1842 QuantizeLinear_h.6.mlp.c_fc.weight_1" -> "1843 DequantizeLinear_h.6.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"1843 DequantizeLinear_h.6.mlp.c_fc.weight_1" -> "1844 Gemm_1473" [label="[768, 3072]", style=solid]; +"1844 Gemm_1473" -> "1850 Reshape_1477" [label="[]", style=solid]; +"1845 Constant_nncf_1705" -> "1846 Unsqueeze_1474" [label="[1]", style=dashed]; +"1846 Unsqueeze_1474" -> "1849 Concat_1476" [label="[1]", style=dashed]; +"1847 Constant_nncf_1707" -> "1848 Unsqueeze_1475" [label="[1]", style=dashed]; +"1848 Unsqueeze_1475" -> "1849 Concat_1476" [label="[1]", style=dashed]; +"1849 Concat_1476" -> "1850 Reshape_1477" [label="[3]", style=dashed]; +"1850 Reshape_1477" -> "1852 Mul_1479" [label="[]", style=solid]; +"1850 Reshape_1477" -> "1854 Pow_1481" [label="[]", style=solid]; +"1850 Reshape_1477" -> "1857 Add_1484" [label="[]", style=solid]; +"1851 Constant_1478" -> "1852 Mul_1479" [label="[]", style=solid]; +"1852 Mul_1479" -> "1863 Mul_1490" [label="[]", style=solid]; +"1853 Constant_1480" -> "1854 Pow_1481" [label="[]", style=solid]; +"1854 Pow_1481" -> "1856 Mul_1483" [label="[]", style=solid]; +"1855 Constant_1482" -> "1856 Mul_1483" [label="[]", style=solid]; +"1856 Mul_1483" -> "1857 Add_1484" [label="[]", style=solid]; +"1857 Add_1484" -> "1859 Mul_1486" [label="[]", style=solid]; +"1858 Constant_1485" -> "1859 Mul_1486" [label="[]", style=solid]; +"1859 Mul_1486" -> "1860 Tanh_1487" [label="[]", style=solid]; +"1860 Tanh_1487" -> "1862 Add_1489" [label="[]", style=solid]; +"1861 Constant_1488" -> "1862 Add_1489" [label="[]", style=solid]; +"1862 Add_1489" -> "1863 Mul_1490" [label="[]", style=solid]; +"1863 Mul_1490" -> "1864 QuantizeLinear_1865_1" [label="[]", style=solid]; +"1864 QuantizeLinear_1865_1" -> "1865 DequantizeLinear_1865_1" [label="[]", style=dashed]; +"1865 DequantizeLinear_1865_1" -> "1866 Shape_1491" [label="[]", style=solid]; +"1865 DequantizeLinear_1865_1" -> "1869 Shape_1494" [label="[]", style=solid]; +"1865 DequantizeLinear_1865_1" -> "1872 Shape_1497" [label="[]", style=solid]; +"1865 DequantizeLinear_1865_1" -> "1878 Reshape_1502" [label="[]", style=solid]; +"1866 Shape_1491" -> "1868 Gather_1493" [label="[-1]", style=dashed]; +"1867 Constant_1492" -> "1868 Gather_1493" [label="[]", style=dashed]; +"1868 Gather_1493" -> "1883 Unsqueeze_1504" [label="[]", style=dashed]; +"1869 Shape_1494" -> "1871 Gather_1496" [label="[-1]", style=dashed]; +"1870 Constant_1495" -> "1871 Gather_1496" [label="[]", style=dashed]; +"1871 Gather_1496" -> "1885 Unsqueeze_1505" [label="[]", style=dashed]; +"1872 Shape_1497" -> "1874 Gather_1499" [label="[-1]", style=dashed]; +"1873 Constant_1498" -> "1874 Gather_1499" [label="[]", style=dashed]; +"1874 Gather_1499" -> "1876 Unsqueeze_1500" [label="[]", style=dashed]; +"1875 
Constant_nncf_1733" -> "1876 Unsqueeze_1500" [label="[1]", style=dashed]; +"1876 Unsqueeze_1500" -> "1877 Concat_1501" [label="[1]", style=dashed]; +"1877 Concat_1501" -> "1878 Reshape_1502" [label="[2]", style=dashed]; +"1878 Reshape_1502" -> "1881 Gemm_1503" [label="[]", style=solid]; +"1879 QuantizeLinear_h.6.mlp.c_proj.weight_1" -> "1880 DequantizeLinear_h.6.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"1880 DequantizeLinear_h.6.mlp.c_proj.weight_1" -> "1881 Gemm_1503" [label="[3072, 768]", style=solid]; +"1881 Gemm_1503" -> "1887 Reshape_1507" [label="[]", style=solid]; +"1882 Constant_nncf_1738" -> "1883 Unsqueeze_1504" [label="[1]", style=dashed]; +"1883 Unsqueeze_1504" -> "1886 Concat_1506" [label="[1]", style=dashed]; +"1884 Constant_nncf_1740" -> "1885 Unsqueeze_1505" [label="[1]", style=dashed]; +"1885 Unsqueeze_1505" -> "1886 Concat_1506" [label="[1]", style=dashed]; +"1886 Concat_1506" -> "1887 Reshape_1507" [label="[3]", style=dashed]; +"1887 Reshape_1507" -> "1888 Add_1508" [label="[]", style=solid]; +"1888 Add_1508" -> "1889 ReduceMean_1509" [label="[]", style=solid]; +"1888 Add_1508" -> "1890 Sub_1510" [label="[]", style=solid]; +"1888 Add_1508" -> "2079 Add_1660" [label="[]", style=solid]; +"1889 ReduceMean_1509" -> "1890 Sub_1510" [label="[]", style=solid]; +"1890 Sub_1510" -> "1892 Pow_1512" [label="[]", style=solid]; +"1890 Sub_1510" -> "1897 Div_1517" [label="[]", style=solid]; +"1891 Constant_1511" -> "1892 Pow_1512" [label="[]", style=solid]; +"1892 Pow_1512" -> "1893 ReduceMean_1513" [label="[]", style=solid]; +"1893 ReduceMean_1513" -> "1895 Add_1515" [label="[]", style=solid]; +"1894 Constant_1514" -> "1895 Add_1515" [label="[]", style=solid]; +"1895 Add_1515" -> "1896 Sqrt_1516" [label="[]", style=solid]; +"1896 Sqrt_1516" -> "1897 Div_1517" [label="[]", style=solid]; +"1897 Div_1517" -> "1898 Mul_1518" [label="[]", style=solid]; +"1898 Mul_1518" -> "1899 Add_1519" [label="[]", style=solid]; +"1899 Add_1519" -> "1900 QuantizeLinear_1898_1" [label="[]", style=solid]; +"1900 QuantizeLinear_1898_1" -> "1901 DequantizeLinear_1898_1" [label="[]", style=dashed]; +"1901 DequantizeLinear_1898_1" -> "1902 Shape_1520" [label="[]", style=solid]; +"1901 DequantizeLinear_1898_1" -> "1905 Shape_1523" [label="[]", style=solid]; +"1901 DequantizeLinear_1898_1" -> "1908 Shape_1526" [label="[]", style=solid]; +"1901 DequantizeLinear_1898_1" -> "1914 Reshape_1531" [label="[]", style=solid]; +"1902 Shape_1520" -> "1904 Gather_1522" [label="[-1]", style=dashed]; +"1903 Constant_1521" -> "1904 Gather_1522" [label="[]", style=dashed]; +"1904 Gather_1522" -> "1919 Unsqueeze_1533" [label="[]", style=dashed]; +"1905 Shape_1523" -> "1907 Gather_1525" [label="[-1]", style=dashed]; +"1906 Constant_1524" -> "1907 Gather_1525" [label="[]", style=dashed]; +"1907 Gather_1525" -> "1921 Unsqueeze_1534" [label="[]", style=dashed]; +"1908 Shape_1526" -> "1910 Gather_1528" [label="[-1]", style=dashed]; +"1909 Constant_1527" -> "1910 Gather_1528" [label="[]", style=dashed]; +"1910 Gather_1528" -> "1912 Unsqueeze_1529" [label="[]", style=dashed]; +"1911 Constant_nncf_1765" -> "1912 Unsqueeze_1529" [label="[1]", style=dashed]; +"1912 Unsqueeze_1529" -> "1913 Concat_1530" [label="[1]", style=dashed]; +"1913 Concat_1530" -> "1914 Reshape_1531" [label="[2]", style=dashed]; +"1914 Reshape_1531" -> "1917 Gemm_1532" [label="[]", style=solid]; +"1915 QuantizeLinear_h.7.attn.c_attn.weight_1" -> "1916 DequantizeLinear_h.7.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"1916 
DequantizeLinear_h.7.attn.c_attn.weight_1" -> "1917 Gemm_1532" [label="[768, 2304]", style=solid]; +"1917 Gemm_1532" -> "1923 Reshape_1536" [label="[]", style=solid]; +"1918 Constant_nncf_1770" -> "1919 Unsqueeze_1533" [label="[1]", style=dashed]; +"1919 Unsqueeze_1533" -> "1922 Concat_1535" [label="[1]", style=dashed]; +"1920 Constant_nncf_1772" -> "1921 Unsqueeze_1534" [label="[1]", style=dashed]; +"1921 Unsqueeze_1534" -> "1922 Concat_1535" [label="[1]", style=dashed]; +"1922 Concat_1535" -> "1923 Reshape_1536" [label="[3]", style=dashed]; +"1923 Reshape_1536" -> "1925 Split_1537" [label="[]", style=solid]; +"1924 Constant_nncf_1776" -> "1925 Split_1537" [label="[3]", style=dashed]; +"1925 Split_1537" -> "1926 QuantizeLinear_query.15_1" [label="[]", style=solid]; +"1925 Split_1537" -> "1928 Shape_1538" [label="[]", style=solid]; +"1925 Split_1537" -> "1931 Shape_1541" [label="[]", style=solid]; +"1925 Split_1537" -> "1934 Shape_1544" [label="[]", style=solid]; +"1925 Split_1537" -> "1950 Shape_1557" [label="[]", style=solid]; +"1925 Split_1537" -> "1953 Shape_1560" [label="[]", style=solid]; +"1925 Split_1537" -> "1956 Shape_1563" [label="[]", style=solid]; +"1925 Split_1537" -> "1970 Reshape_1574" [label="[]", style=solid]; +"1925 Split_1537" -> "1974 Shape_1576" [label="[]", style=solid]; +"1925 Split_1537" -> "1977 Shape_1579" [label="[]", style=solid]; +"1925 Split_1537" -> "1980 Shape_1582" [label="[]", style=solid]; +"1925 Split_1537" -> "1994 Reshape_1593" [label="[]", style=solid]; +"1926 QuantizeLinear_query.15_1" -> "1927 DequantizeLinear_query.15_1" [label="[]", style=dashed]; +"1927 DequantizeLinear_query.15_1" -> "1948 Reshape_1555" [label="[]", style=solid]; +"1928 Shape_1538" -> "1930 Gather_1540" [label="[-1]", style=dashed]; +"1929 Constant_1539" -> "1930 Gather_1540" [label="[]", style=dashed]; +"1930 Gather_1540" -> "1942 Unsqueeze_1551" [label="[]", style=dashed]; +"1931 Shape_1541" -> "1933 Gather_1543" [label="[-1]", style=dashed]; +"1932 Constant_1542" -> "1933 Gather_1543" [label="[]", style=dashed]; +"1933 Gather_1543" -> "1944 Unsqueeze_1552" [label="[]", style=dashed]; +"1934 Shape_1544" -> "1936 Gather_1546" [label="[-1]", style=dashed]; +"1935 Constant_1545" -> "1936 Gather_1546" [label="[]", style=dashed]; +"1936 Gather_1546" -> "1938 Div_1548" [label="[]", style=dashed]; +"1937 Constant_1547" -> "1938 Div_1548" [label="[]", style=dashed]; +"1938 Div_1548" -> "1939 Cast_1549" [label="[]", style=dashed]; +"1939 Cast_1549" -> "1940 Cast_1550" [label="[]", style=dashed]; +"1940 Cast_1550" -> "1946 Unsqueeze_1553" [label="[]", style=dashed]; +"1941 Constant_nncf_1791" -> "1942 Unsqueeze_1551" [label="[1]", style=dashed]; +"1942 Unsqueeze_1551" -> "1947 Concat_1554" [label="[1]", style=dashed]; +"1943 Constant_nncf_1793" -> "1944 Unsqueeze_1552" [label="[1]", style=dashed]; +"1944 Unsqueeze_1552" -> "1947 Concat_1554" [label="[1]", style=dashed]; +"1945 Constant_nncf_1795" -> "1946 Unsqueeze_1553" [label="[1]", style=dashed]; +"1946 Unsqueeze_1553" -> "1947 Concat_1554" [label="[1]", style=dashed]; +"1947 Concat_1554" -> "1948 Reshape_1555" [label="[4]", style=dashed]; +"1948 Reshape_1555" -> "1949 Transpose_1556" [label="[]", style=solid]; +"1949 Transpose_1556" -> "2002 MatMul_1599" [label="[]", style=solid]; +"1950 Shape_1557" -> "1952 Gather_1559" [label="[-1]", style=dashed]; +"1951 Constant_1558" -> "1952 Gather_1559" [label="[]", style=dashed]; +"1952 Gather_1559" -> "1964 Unsqueeze_1570" [label="[]", style=dashed]; +"1953 Shape_1560" -> "1955 
Gather_1562" [label="[-1]", style=dashed]; +"1954 Constant_1561" -> "1955 Gather_1562" [label="[]", style=dashed]; +"1955 Gather_1562" -> "1966 Unsqueeze_1571" [label="[]", style=dashed]; +"1956 Shape_1563" -> "1958 Gather_1565" [label="[-1]", style=dashed]; +"1957 Constant_1564" -> "1958 Gather_1565" [label="[]", style=dashed]; +"1958 Gather_1565" -> "1960 Div_1567" [label="[]", style=dashed]; +"1959 Constant_1566" -> "1960 Div_1567" [label="[]", style=dashed]; +"1960 Div_1567" -> "1961 Cast_1568" [label="[]", style=dashed]; +"1961 Cast_1568" -> "1962 Cast_1569" [label="[]", style=dashed]; +"1962 Cast_1569" -> "1968 Unsqueeze_1572" [label="[]", style=dashed]; +"1963 Constant_nncf_1813" -> "1964 Unsqueeze_1570" [label="[1]", style=dashed]; +"1964 Unsqueeze_1570" -> "1969 Concat_1573" [label="[1]", style=dashed]; +"1965 Constant_nncf_1815" -> "1966 Unsqueeze_1571" [label="[1]", style=dashed]; +"1966 Unsqueeze_1571" -> "1969 Concat_1573" [label="[1]", style=dashed]; +"1967 Constant_nncf_1817" -> "1968 Unsqueeze_1572" [label="[1]", style=dashed]; +"1968 Unsqueeze_1572" -> "1969 Concat_1573" [label="[1]", style=dashed]; +"1969 Concat_1573" -> "1970 Reshape_1574" [label="[4]", style=dashed]; +"1970 Reshape_1574" -> "1971 QuantizeLinear_1963_1" [label="[]", style=solid]; +"1970 Reshape_1574" -> "1996 Transpose_1595" [label="[]", style=solid]; +"1971 QuantizeLinear_1963_1" -> "1972 DequantizeLinear_1963_1" [label="[]", style=dashed]; +"1972 DequantizeLinear_1963_1" -> "1973 Transpose_1575" [label="[]", style=solid]; +"1973 Transpose_1575" -> "2002 MatMul_1599" [label="[]", style=solid]; +"1974 Shape_1576" -> "1976 Gather_1578" [label="[-1]", style=dashed]; +"1975 Constant_1577" -> "1976 Gather_1578" [label="[]", style=dashed]; +"1976 Gather_1578" -> "1988 Unsqueeze_1589" [label="[]", style=dashed]; +"1977 Shape_1579" -> "1979 Gather_1581" [label="[-1]", style=dashed]; +"1978 Constant_1580" -> "1979 Gather_1581" [label="[]", style=dashed]; +"1979 Gather_1581" -> "1990 Unsqueeze_1590" [label="[]", style=dashed]; +"1980 Shape_1582" -> "1982 Gather_1584" [label="[-1]", style=dashed]; +"1981 Constant_1583" -> "1982 Gather_1584" [label="[]", style=dashed]; +"1982 Gather_1584" -> "1984 Div_1586" [label="[]", style=dashed]; +"1983 Constant_1585" -> "1984 Div_1586" [label="[]", style=dashed]; +"1984 Div_1586" -> "1985 Cast_1587" [label="[]", style=dashed]; +"1985 Cast_1587" -> "1986 Cast_1588" [label="[]", style=dashed]; +"1986 Cast_1588" -> "1992 Unsqueeze_1591" [label="[]", style=dashed]; +"1987 Constant_nncf_1835" -> "1988 Unsqueeze_1589" [label="[1]", style=dashed]; +"1988 Unsqueeze_1589" -> "1993 Concat_1592" [label="[1]", style=dashed]; +"1989 Constant_nncf_1837" -> "1990 Unsqueeze_1590" [label="[1]", style=dashed]; +"1990 Unsqueeze_1590" -> "1993 Concat_1592" [label="[1]", style=dashed]; +"1991 Constant_nncf_1839" -> "1992 Unsqueeze_1591" [label="[1]", style=dashed]; +"1992 Unsqueeze_1591" -> "1993 Concat_1592" [label="[1]", style=dashed]; +"1993 Concat_1592" -> "1994 Reshape_1593" [label="[4]", style=dashed]; +"1994 Reshape_1593" -> "1995 Transpose_1594" [label="[]", style=solid]; +"1995 Transpose_1594" -> "2000 Unsqueeze_1597" [label="[]", style=solid]; +"1995 Transpose_1594" -> "2032 MatMul_1623" [label="[]", style=solid]; +"1996 Transpose_1595" -> "1998 Unsqueeze_1596" [label="[]", style=solid]; +"1997 Constant_nncf_1845" -> "1998 Unsqueeze_1596" [label="[1]", style=dashed]; +"1998 Unsqueeze_1596" -> "2001 Concat_1598" [label="[]", style=solid]; +"1999 Constant_nncf_1847" -> "2000 
Unsqueeze_1597" [label="[1]", style=dashed]; +"2000 Unsqueeze_1597" -> "2001 Concat_1598" [label="[]", style=solid]; +"2001 Concat_1598" -> "3239 nncf_model_output_8" [label="[2, 1, 12, 8, 64]", style=solid]; +"2002 MatMul_1599" -> "2004 Div_1601" [label="[]", style=solid]; +"2003 Constant_1600" -> "2004 Div_1601" [label="[]", style=solid]; +"2004 Div_1601" -> "2005 Shape_1602" [label="[]", style=solid]; +"2004 Div_1601" -> "2008 Shape_1605" [label="[]", style=solid]; +"2004 Div_1601" -> "2022 Mul_1616" [label="[]", style=solid]; +"2005 Shape_1602" -> "2007 Gather_1604" [label="[-1]", style=dashed]; +"2006 Constant_1603" -> "2007 Gather_1604" [label="[]", style=dashed]; +"2007 Gather_1604" -> "2011 Sub_1608" [label="[]", style=dashed]; +"2008 Shape_1605" -> "2010 Gather_1607" [label="[-1]", style=dashed]; +"2009 Constant_1606" -> "2010 Gather_1607" [label="[]", style=dashed]; +"2010 Gather_1607" -> "2011 Sub_1608" [label="[]", style=dashed]; +"2010 Gather_1607" -> "2015 Unsqueeze_1610" [label="[]", style=dashed]; +"2010 Gather_1607" -> "2019 Unsqueeze_1613" [label="[]", style=dashed]; +"2011 Sub_1608" -> "2013 Unsqueeze_1609" [label="[]", style=dashed]; +"2012 Constant_nncf_1860" -> "2013 Unsqueeze_1609" [label="[1]", style=dashed]; +"2013 Unsqueeze_1609" -> "2017 Slice_1612" [label="[1]", style=dashed]; +"2014 Constant_nncf_1862" -> "2015 Unsqueeze_1610" [label="[1]", style=dashed]; +"2015 Unsqueeze_1610" -> "2017 Slice_1612" [label="[1]", style=dashed]; +"2016 Constant_1611" -> "2017 Slice_1612" [label="[1]", style=dashed]; +"2017 Slice_1612" -> "2021 Slice_1615" [label="[-1, -1, -1, -1]", style=solid]; +"2018 Constant_nncf_1866" -> "2019 Unsqueeze_1613" [label="[1]", style=dashed]; +"2019 Unsqueeze_1613" -> "2021 Slice_1615" [label="[1]", style=dashed]; +"2020 Constant_1614" -> "2021 Slice_1615" [label="[1]", style=dashed]; +"2021 Slice_1615" -> "2022 Mul_1616" [label="[-1, -1, -1, -1]", style=solid]; +"2021 Slice_1615" -> "2024 Sub_1618" [label="[-1, -1, -1, -1]", style=solid]; +"2022 Mul_1616" -> "2027 Sub_1621" [label="[]", style=solid]; +"2023 Constant_1617" -> "2024 Sub_1618" [label="[]", style=solid]; +"2024 Sub_1618" -> "2026 Mul_1620" [label="[-1, -1, -1, -1]", style=solid]; +"2025 Constant_1619" -> "2026 Mul_1620" [label="[]", style=solid]; +"2026 Mul_1620" -> "2027 Sub_1621" [label="[-1, -1, -1, -1]", style=solid]; +"2027 Sub_1621" -> "2028 Shape_nncf_1876" [label="[]", style=solid]; +"2027 Sub_1621" -> "2029 Flatten_nncf_1877" [label="[]", style=solid]; +"2028 Shape_nncf_1876" -> "2031 Reshape_nncf_1879" [label="[-1]", style=dashed]; +"2029 Flatten_nncf_1877" -> "2030 Softmax_1622" [label="[]", style=solid]; +"2030 Softmax_1622" -> "2031 Reshape_nncf_1879" [label="[]", style=solid]; +"2031 Reshape_nncf_1879" -> "2032 MatMul_1623" [label="[]", style=solid]; +"2032 MatMul_1623" -> "2033 QuantizeLinear_2020_1" [label="[]", style=solid]; +"2033 QuantizeLinear_2020_1" -> "2034 DequantizeLinear_2020_1" [label="[]", style=dashed]; +"2034 DequantizeLinear_2020_1" -> "2035 Transpose_1624" [label="[]", style=solid]; +"2035 Transpose_1624" -> "2036 Shape_1625" [label="[]", style=solid]; +"2035 Transpose_1624" -> "2039 Shape_1628" [label="[]", style=solid]; +"2035 Transpose_1624" -> "2042 Shape_1631" [label="[]", style=solid]; +"2035 Transpose_1624" -> "2045 Shape_1634" [label="[]", style=solid]; +"2035 Transpose_1624" -> "2056 Reshape_1642" [label="[]", style=solid]; +"2036 Shape_1625" -> "2038 Gather_1627" [label="[-1]", style=dashed]; +"2037 Constant_1626" -> "2038 Gather_1627" 
[label="[]", style=dashed]; +"2038 Gather_1627" -> "2050 Unsqueeze_1638" [label="[]", style=dashed]; +"2039 Shape_1628" -> "2041 Gather_1630" [label="[-1]", style=dashed]; +"2040 Constant_1629" -> "2041 Gather_1630" [label="[]", style=dashed]; +"2041 Gather_1630" -> "2052 Unsqueeze_1639" [label="[]", style=dashed]; +"2042 Shape_1631" -> "2044 Gather_1633" [label="[-1]", style=dashed]; +"2043 Constant_1632" -> "2044 Gather_1633" [label="[]", style=dashed]; +"2044 Gather_1633" -> "2048 Mul_1637" [label="[]", style=dashed]; +"2045 Shape_1634" -> "2047 Gather_1636" [label="[-1]", style=dashed]; +"2046 Constant_1635" -> "2047 Gather_1636" [label="[]", style=dashed]; +"2047 Gather_1636" -> "2048 Mul_1637" [label="[]", style=dashed]; +"2048 Mul_1637" -> "2054 Unsqueeze_1640" [label="[]", style=dashed]; +"2049 Constant_nncf_1895" -> "2050 Unsqueeze_1638" [label="[1]", style=dashed]; +"2050 Unsqueeze_1638" -> "2055 Concat_1641" [label="[1]", style=dashed]; +"2051 Constant_nncf_1897" -> "2052 Unsqueeze_1639" [label="[1]", style=dashed]; +"2052 Unsqueeze_1639" -> "2055 Concat_1641" [label="[1]", style=dashed]; +"2053 Constant_nncf_1899" -> "2054 Unsqueeze_1640" [label="[1]", style=dashed]; +"2054 Unsqueeze_1640" -> "2055 Concat_1641" [label="[1]", style=dashed]; +"2055 Concat_1641" -> "2056 Reshape_1642" [label="[3]", style=dashed]; +"2056 Reshape_1642" -> "2057 Shape_1643" [label="[]", style=solid]; +"2056 Reshape_1642" -> "2060 Shape_1646" [label="[]", style=solid]; +"2056 Reshape_1642" -> "2063 Shape_1649" [label="[]", style=solid]; +"2056 Reshape_1642" -> "2069 Reshape_1654" [label="[]", style=solid]; +"2057 Shape_1643" -> "2059 Gather_1645" [label="[-1]", style=dashed]; +"2058 Constant_1644" -> "2059 Gather_1645" [label="[]", style=dashed]; +"2059 Gather_1645" -> "2074 Unsqueeze_1656" [label="[]", style=dashed]; +"2060 Shape_1646" -> "2062 Gather_1648" [label="[-1]", style=dashed]; +"2061 Constant_1647" -> "2062 Gather_1648" [label="[]", style=dashed]; +"2062 Gather_1648" -> "2076 Unsqueeze_1657" [label="[]", style=dashed]; +"2063 Shape_1649" -> "2065 Gather_1651" [label="[-1]", style=dashed]; +"2064 Constant_1650" -> "2065 Gather_1651" [label="[]", style=dashed]; +"2065 Gather_1651" -> "2067 Unsqueeze_1652" [label="[]", style=dashed]; +"2066 Constant_nncf_1912" -> "2067 Unsqueeze_1652" [label="[1]", style=dashed]; +"2067 Unsqueeze_1652" -> "2068 Concat_1653" [label="[1]", style=dashed]; +"2068 Concat_1653" -> "2069 Reshape_1654" [label="[2]", style=dashed]; +"2069 Reshape_1654" -> "2072 Gemm_1655" [label="[]", style=solid]; +"2070 QuantizeLinear_h.7.attn.c_proj.weight_1" -> "2071 DequantizeLinear_h.7.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"2071 DequantizeLinear_h.7.attn.c_proj.weight_1" -> "2072 Gemm_1655" [label="[768, 768]", style=solid]; +"2072 Gemm_1655" -> "2078 Reshape_1659" [label="[]", style=solid]; +"2073 Constant_nncf_1917" -> "2074 Unsqueeze_1656" [label="[1]", style=dashed]; +"2074 Unsqueeze_1656" -> "2077 Concat_1658" [label="[1]", style=dashed]; +"2075 Constant_nncf_1919" -> "2076 Unsqueeze_1657" [label="[1]", style=dashed]; +"2076 Unsqueeze_1657" -> "2077 Concat_1658" [label="[1]", style=dashed]; +"2077 Concat_1658" -> "2078 Reshape_1659" [label="[3]", style=dashed]; +"2078 Reshape_1659" -> "2079 Add_1660" [label="[]", style=solid]; +"2079 Add_1660" -> "2080 ReduceMean_1661" [label="[]", style=solid]; +"2079 Add_1660" -> "2081 Sub_1662" [label="[]", style=solid]; +"2079 Add_1660" -> "2152 Add_1719" [label="[]", style=solid]; +"2080 ReduceMean_1661" -> "2081 
Sub_1662" [label="[]", style=solid]; +"2081 Sub_1662" -> "2083 Pow_1664" [label="[]", style=solid]; +"2081 Sub_1662" -> "2088 Div_1669" [label="[]", style=solid]; +"2082 Constant_1663" -> "2083 Pow_1664" [label="[]", style=solid]; +"2083 Pow_1664" -> "2084 ReduceMean_1665" [label="[]", style=solid]; +"2084 ReduceMean_1665" -> "2086 Add_1667" [label="[]", style=solid]; +"2085 Constant_1666" -> "2086 Add_1667" [label="[]", style=solid]; +"2086 Add_1667" -> "2087 Sqrt_1668" [label="[]", style=solid]; +"2087 Sqrt_1668" -> "2088 Div_1669" [label="[]", style=solid]; +"2088 Div_1669" -> "2089 Mul_1670" [label="[]", style=solid]; +"2089 Mul_1670" -> "2090 Add_1671" [label="[]", style=solid]; +"2090 Add_1671" -> "2091 QuantizeLinear_2072_1" [label="[]", style=solid]; +"2091 QuantizeLinear_2072_1" -> "2092 DequantizeLinear_2072_1" [label="[]", style=dashed]; +"2092 DequantizeLinear_2072_1" -> "2093 Shape_1672" [label="[]", style=solid]; +"2092 DequantizeLinear_2072_1" -> "2096 Shape_1675" [label="[]", style=solid]; +"2092 DequantizeLinear_2072_1" -> "2099 Shape_1678" [label="[]", style=solid]; +"2092 DequantizeLinear_2072_1" -> "2105 Reshape_1683" [label="[]", style=solid]; +"2093 Shape_1672" -> "2095 Gather_1674" [label="[-1]", style=dashed]; +"2094 Constant_1673" -> "2095 Gather_1674" [label="[]", style=dashed]; +"2095 Gather_1674" -> "2110 Unsqueeze_1685" [label="[]", style=dashed]; +"2096 Shape_1675" -> "2098 Gather_1677" [label="[-1]", style=dashed]; +"2097 Constant_1676" -> "2098 Gather_1677" [label="[]", style=dashed]; +"2098 Gather_1677" -> "2112 Unsqueeze_1686" [label="[]", style=dashed]; +"2099 Shape_1678" -> "2101 Gather_1680" [label="[-1]", style=dashed]; +"2100 Constant_1679" -> "2101 Gather_1680" [label="[]", style=dashed]; +"2101 Gather_1680" -> "2103 Unsqueeze_1681" [label="[]", style=dashed]; +"2102 Constant_nncf_1944" -> "2103 Unsqueeze_1681" [label="[1]", style=dashed]; +"2103 Unsqueeze_1681" -> "2104 Concat_1682" [label="[1]", style=dashed]; +"2104 Concat_1682" -> "2105 Reshape_1683" [label="[2]", style=dashed]; +"2105 Reshape_1683" -> "2108 Gemm_1684" [label="[]", style=solid]; +"2106 QuantizeLinear_h.7.mlp.c_fc.weight_1" -> "2107 DequantizeLinear_h.7.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"2107 DequantizeLinear_h.7.mlp.c_fc.weight_1" -> "2108 Gemm_1684" [label="[768, 3072]", style=solid]; +"2108 Gemm_1684" -> "2114 Reshape_1688" [label="[]", style=solid]; +"2109 Constant_nncf_1949" -> "2110 Unsqueeze_1685" [label="[1]", style=dashed]; +"2110 Unsqueeze_1685" -> "2113 Concat_1687" [label="[1]", style=dashed]; +"2111 Constant_nncf_1951" -> "2112 Unsqueeze_1686" [label="[1]", style=dashed]; +"2112 Unsqueeze_1686" -> "2113 Concat_1687" [label="[1]", style=dashed]; +"2113 Concat_1687" -> "2114 Reshape_1688" [label="[3]", style=dashed]; +"2114 Reshape_1688" -> "2116 Mul_1690" [label="[]", style=solid]; +"2114 Reshape_1688" -> "2118 Pow_1692" [label="[]", style=solid]; +"2114 Reshape_1688" -> "2121 Add_1695" [label="[]", style=solid]; +"2115 Constant_1689" -> "2116 Mul_1690" [label="[]", style=solid]; +"2116 Mul_1690" -> "2127 Mul_1701" [label="[]", style=solid]; +"2117 Constant_1691" -> "2118 Pow_1692" [label="[]", style=solid]; +"2118 Pow_1692" -> "2120 Mul_1694" [label="[]", style=solid]; +"2119 Constant_1693" -> "2120 Mul_1694" [label="[]", style=solid]; +"2120 Mul_1694" -> "2121 Add_1695" [label="[]", style=solid]; +"2121 Add_1695" -> "2123 Mul_1697" [label="[]", style=solid]; +"2122 Constant_1696" -> "2123 Mul_1697" [label="[]", style=solid]; +"2123 Mul_1697" -> 
"2124 Tanh_1698" [label="[]", style=solid]; +"2124 Tanh_1698" -> "2126 Add_1700" [label="[]", style=solid]; +"2125 Constant_1699" -> "2126 Add_1700" [label="[]", style=solid]; +"2126 Add_1700" -> "2127 Mul_1701" [label="[]", style=solid]; +"2127 Mul_1701" -> "2128 QuantizeLinear_2106_1" [label="[]", style=solid]; +"2128 QuantizeLinear_2106_1" -> "2129 DequantizeLinear_2106_1" [label="[]", style=dashed]; +"2129 DequantizeLinear_2106_1" -> "2130 Shape_1702" [label="[]", style=solid]; +"2129 DequantizeLinear_2106_1" -> "2133 Shape_1705" [label="[]", style=solid]; +"2129 DequantizeLinear_2106_1" -> "2136 Shape_1708" [label="[]", style=solid]; +"2129 DequantizeLinear_2106_1" -> "2142 Reshape_1713" [label="[]", style=solid]; +"2130 Shape_1702" -> "2132 Gather_1704" [label="[-1]", style=dashed]; +"2131 Constant_1703" -> "2132 Gather_1704" [label="[]", style=dashed]; +"2132 Gather_1704" -> "2147 Unsqueeze_1715" [label="[]", style=dashed]; +"2133 Shape_1705" -> "2135 Gather_1707" [label="[-1]", style=dashed]; +"2134 Constant_1706" -> "2135 Gather_1707" [label="[]", style=dashed]; +"2135 Gather_1707" -> "2149 Unsqueeze_1716" [label="[]", style=dashed]; +"2136 Shape_1708" -> "2138 Gather_1710" [label="[-1]", style=dashed]; +"2137 Constant_1709" -> "2138 Gather_1710" [label="[]", style=dashed]; +"2138 Gather_1710" -> "2140 Unsqueeze_1711" [label="[]", style=dashed]; +"2139 Constant_nncf_1977" -> "2140 Unsqueeze_1711" [label="[1]", style=dashed]; +"2140 Unsqueeze_1711" -> "2141 Concat_1712" [label="[1]", style=dashed]; +"2141 Concat_1712" -> "2142 Reshape_1713" [label="[2]", style=dashed]; +"2142 Reshape_1713" -> "2145 Gemm_1714" [label="[]", style=solid]; +"2143 QuantizeLinear_h.7.mlp.c_proj.weight_1" -> "2144 DequantizeLinear_h.7.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"2144 DequantizeLinear_h.7.mlp.c_proj.weight_1" -> "2145 Gemm_1714" [label="[3072, 768]", style=solid]; +"2145 Gemm_1714" -> "2151 Reshape_1718" [label="[]", style=solid]; +"2146 Constant_nncf_1982" -> "2147 Unsqueeze_1715" [label="[1]", style=dashed]; +"2147 Unsqueeze_1715" -> "2150 Concat_1717" [label="[1]", style=dashed]; +"2148 Constant_nncf_1984" -> "2149 Unsqueeze_1716" [label="[1]", style=dashed]; +"2149 Unsqueeze_1716" -> "2150 Concat_1717" [label="[1]", style=dashed]; +"2150 Concat_1717" -> "2151 Reshape_1718" [label="[3]", style=dashed]; +"2151 Reshape_1718" -> "2152 Add_1719" [label="[]", style=solid]; +"2152 Add_1719" -> "2153 ReduceMean_1720" [label="[]", style=solid]; +"2152 Add_1719" -> "2154 Sub_1721" [label="[]", style=solid]; +"2152 Add_1719" -> "2343 Add_1871" [label="[]", style=solid]; +"2153 ReduceMean_1720" -> "2154 Sub_1721" [label="[]", style=solid]; +"2154 Sub_1721" -> "2156 Pow_1723" [label="[]", style=solid]; +"2154 Sub_1721" -> "2161 Div_1728" [label="[]", style=solid]; +"2155 Constant_1722" -> "2156 Pow_1723" [label="[]", style=solid]; +"2156 Pow_1723" -> "2157 ReduceMean_1724" [label="[]", style=solid]; +"2157 ReduceMean_1724" -> "2159 Add_1726" [label="[]", style=solid]; +"2158 Constant_1725" -> "2159 Add_1726" [label="[]", style=solid]; +"2159 Add_1726" -> "2160 Sqrt_1727" [label="[]", style=solid]; +"2160 Sqrt_1727" -> "2161 Div_1728" [label="[]", style=solid]; +"2161 Div_1728" -> "2162 Mul_1729" [label="[]", style=solid]; +"2162 Mul_1729" -> "2163 Add_1730" [label="[]", style=solid]; +"2163 Add_1730" -> "2164 QuantizeLinear_2139_1" [label="[]", style=solid]; +"2164 QuantizeLinear_2139_1" -> "2165 DequantizeLinear_2139_1" [label="[]", style=dashed]; +"2165 DequantizeLinear_2139_1" -> 
"2166 Shape_1731" [label="[]", style=solid]; +"2165 DequantizeLinear_2139_1" -> "2169 Shape_1734" [label="[]", style=solid]; +"2165 DequantizeLinear_2139_1" -> "2172 Shape_1737" [label="[]", style=solid]; +"2165 DequantizeLinear_2139_1" -> "2178 Reshape_1742" [label="[]", style=solid]; +"2166 Shape_1731" -> "2168 Gather_1733" [label="[-1]", style=dashed]; +"2167 Constant_1732" -> "2168 Gather_1733" [label="[]", style=dashed]; +"2168 Gather_1733" -> "2183 Unsqueeze_1744" [label="[]", style=dashed]; +"2169 Shape_1734" -> "2171 Gather_1736" [label="[-1]", style=dashed]; +"2170 Constant_1735" -> "2171 Gather_1736" [label="[]", style=dashed]; +"2171 Gather_1736" -> "2185 Unsqueeze_1745" [label="[]", style=dashed]; +"2172 Shape_1737" -> "2174 Gather_1739" [label="[-1]", style=dashed]; +"2173 Constant_1738" -> "2174 Gather_1739" [label="[]", style=dashed]; +"2174 Gather_1739" -> "2176 Unsqueeze_1740" [label="[]", style=dashed]; +"2175 Constant_nncf_2009" -> "2176 Unsqueeze_1740" [label="[1]", style=dashed]; +"2176 Unsqueeze_1740" -> "2177 Concat_1741" [label="[1]", style=dashed]; +"2177 Concat_1741" -> "2178 Reshape_1742" [label="[2]", style=dashed]; +"2178 Reshape_1742" -> "2181 Gemm_1743" [label="[]", style=solid]; +"2179 QuantizeLinear_h.8.attn.c_attn.weight_1" -> "2180 DequantizeLinear_h.8.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"2180 DequantizeLinear_h.8.attn.c_attn.weight_1" -> "2181 Gemm_1743" [label="[768, 2304]", style=solid]; +"2181 Gemm_1743" -> "2187 Reshape_1747" [label="[]", style=solid]; +"2182 Constant_nncf_2014" -> "2183 Unsqueeze_1744" [label="[1]", style=dashed]; +"2183 Unsqueeze_1744" -> "2186 Concat_1746" [label="[1]", style=dashed]; +"2184 Constant_nncf_2016" -> "2185 Unsqueeze_1745" [label="[1]", style=dashed]; +"2185 Unsqueeze_1745" -> "2186 Concat_1746" [label="[1]", style=dashed]; +"2186 Concat_1746" -> "2187 Reshape_1747" [label="[3]", style=dashed]; +"2187 Reshape_1747" -> "2189 Split_1748" [label="[]", style=solid]; +"2188 Constant_nncf_2020" -> "2189 Split_1748" [label="[3]", style=dashed]; +"2189 Split_1748" -> "2190 QuantizeLinear_query.17_1" [label="[]", style=solid]; +"2189 Split_1748" -> "2192 Shape_1749" [label="[]", style=solid]; +"2189 Split_1748" -> "2195 Shape_1752" [label="[]", style=solid]; +"2189 Split_1748" -> "2198 Shape_1755" [label="[]", style=solid]; +"2189 Split_1748" -> "2214 Shape_1768" [label="[]", style=solid]; +"2189 Split_1748" -> "2217 Shape_1771" [label="[]", style=solid]; +"2189 Split_1748" -> "2220 Shape_1774" [label="[]", style=solid]; +"2189 Split_1748" -> "2234 Reshape_1785" [label="[]", style=solid]; +"2189 Split_1748" -> "2238 Shape_1787" [label="[]", style=solid]; +"2189 Split_1748" -> "2241 Shape_1790" [label="[]", style=solid]; +"2189 Split_1748" -> "2244 Shape_1793" [label="[]", style=solid]; +"2189 Split_1748" -> "2258 Reshape_1804" [label="[]", style=solid]; +"2190 QuantizeLinear_query.17_1" -> "2191 DequantizeLinear_query.17_1" [label="[]", style=dashed]; +"2191 DequantizeLinear_query.17_1" -> "2212 Reshape_1766" [label="[]", style=solid]; +"2192 Shape_1749" -> "2194 Gather_1751" [label="[-1]", style=dashed]; +"2193 Constant_1750" -> "2194 Gather_1751" [label="[]", style=dashed]; +"2194 Gather_1751" -> "2206 Unsqueeze_1762" [label="[]", style=dashed]; +"2195 Shape_1752" -> "2197 Gather_1754" [label="[-1]", style=dashed]; +"2196 Constant_1753" -> "2197 Gather_1754" [label="[]", style=dashed]; +"2197 Gather_1754" -> "2208 Unsqueeze_1763" [label="[]", style=dashed]; +"2198 Shape_1755" -> "2200 Gather_1757" 
[label="[-1]", style=dashed]; +"2199 Constant_1756" -> "2200 Gather_1757" [label="[]", style=dashed]; +"2200 Gather_1757" -> "2202 Div_1759" [label="[]", style=dashed]; +"2201 Constant_1758" -> "2202 Div_1759" [label="[]", style=dashed]; +"2202 Div_1759" -> "2203 Cast_1760" [label="[]", style=dashed]; +"2203 Cast_1760" -> "2204 Cast_1761" [label="[]", style=dashed]; +"2204 Cast_1761" -> "2210 Unsqueeze_1764" [label="[]", style=dashed]; +"2205 Constant_nncf_2035" -> "2206 Unsqueeze_1762" [label="[1]", style=dashed]; +"2206 Unsqueeze_1762" -> "2211 Concat_1765" [label="[1]", style=dashed]; +"2207 Constant_nncf_2037" -> "2208 Unsqueeze_1763" [label="[1]", style=dashed]; +"2208 Unsqueeze_1763" -> "2211 Concat_1765" [label="[1]", style=dashed]; +"2209 Constant_nncf_2039" -> "2210 Unsqueeze_1764" [label="[1]", style=dashed]; +"2210 Unsqueeze_1764" -> "2211 Concat_1765" [label="[1]", style=dashed]; +"2211 Concat_1765" -> "2212 Reshape_1766" [label="[4]", style=dashed]; +"2212 Reshape_1766" -> "2213 Transpose_1767" [label="[]", style=solid]; +"2213 Transpose_1767" -> "2266 MatMul_1810" [label="[]", style=solid]; +"2214 Shape_1768" -> "2216 Gather_1770" [label="[-1]", style=dashed]; +"2215 Constant_1769" -> "2216 Gather_1770" [label="[]", style=dashed]; +"2216 Gather_1770" -> "2228 Unsqueeze_1781" [label="[]", style=dashed]; +"2217 Shape_1771" -> "2219 Gather_1773" [label="[-1]", style=dashed]; +"2218 Constant_1772" -> "2219 Gather_1773" [label="[]", style=dashed]; +"2219 Gather_1773" -> "2230 Unsqueeze_1782" [label="[]", style=dashed]; +"2220 Shape_1774" -> "2222 Gather_1776" [label="[-1]", style=dashed]; +"2221 Constant_1775" -> "2222 Gather_1776" [label="[]", style=dashed]; +"2222 Gather_1776" -> "2224 Div_1778" [label="[]", style=dashed]; +"2223 Constant_1777" -> "2224 Div_1778" [label="[]", style=dashed]; +"2224 Div_1778" -> "2225 Cast_1779" [label="[]", style=dashed]; +"2225 Cast_1779" -> "2226 Cast_1780" [label="[]", style=dashed]; +"2226 Cast_1780" -> "2232 Unsqueeze_1783" [label="[]", style=dashed]; +"2227 Constant_nncf_2057" -> "2228 Unsqueeze_1781" [label="[1]", style=dashed]; +"2228 Unsqueeze_1781" -> "2233 Concat_1784" [label="[1]", style=dashed]; +"2229 Constant_nncf_2059" -> "2230 Unsqueeze_1782" [label="[1]", style=dashed]; +"2230 Unsqueeze_1782" -> "2233 Concat_1784" [label="[1]", style=dashed]; +"2231 Constant_nncf_2061" -> "2232 Unsqueeze_1783" [label="[1]", style=dashed]; +"2232 Unsqueeze_1783" -> "2233 Concat_1784" [label="[1]", style=dashed]; +"2233 Concat_1784" -> "2234 Reshape_1785" [label="[4]", style=dashed]; +"2234 Reshape_1785" -> "2235 QuantizeLinear_2204_1" [label="[]", style=solid]; +"2234 Reshape_1785" -> "2260 Transpose_1806" [label="[]", style=solid]; +"2235 QuantizeLinear_2204_1" -> "2236 DequantizeLinear_2204_1" [label="[]", style=dashed]; +"2236 DequantizeLinear_2204_1" -> "2237 Transpose_1786" [label="[]", style=solid]; +"2237 Transpose_1786" -> "2266 MatMul_1810" [label="[]", style=solid]; +"2238 Shape_1787" -> "2240 Gather_1789" [label="[-1]", style=dashed]; +"2239 Constant_1788" -> "2240 Gather_1789" [label="[]", style=dashed]; +"2240 Gather_1789" -> "2252 Unsqueeze_1800" [label="[]", style=dashed]; +"2241 Shape_1790" -> "2243 Gather_1792" [label="[-1]", style=dashed]; +"2242 Constant_1791" -> "2243 Gather_1792" [label="[]", style=dashed]; +"2243 Gather_1792" -> "2254 Unsqueeze_1801" [label="[]", style=dashed]; +"2244 Shape_1793" -> "2246 Gather_1795" [label="[-1]", style=dashed]; +"2245 Constant_1794" -> "2246 Gather_1795" [label="[]", style=dashed]; +"2246 
Gather_1795" -> "2248 Div_1797" [label="[]", style=dashed]; +"2247 Constant_1796" -> "2248 Div_1797" [label="[]", style=dashed]; +"2248 Div_1797" -> "2249 Cast_1798" [label="[]", style=dashed]; +"2249 Cast_1798" -> "2250 Cast_1799" [label="[]", style=dashed]; +"2250 Cast_1799" -> "2256 Unsqueeze_1802" [label="[]", style=dashed]; +"2251 Constant_nncf_2079" -> "2252 Unsqueeze_1800" [label="[1]", style=dashed]; +"2252 Unsqueeze_1800" -> "2257 Concat_1803" [label="[1]", style=dashed]; +"2253 Constant_nncf_2081" -> "2254 Unsqueeze_1801" [label="[1]", style=dashed]; +"2254 Unsqueeze_1801" -> "2257 Concat_1803" [label="[1]", style=dashed]; +"2255 Constant_nncf_2083" -> "2256 Unsqueeze_1802" [label="[1]", style=dashed]; +"2256 Unsqueeze_1802" -> "2257 Concat_1803" [label="[1]", style=dashed]; +"2257 Concat_1803" -> "2258 Reshape_1804" [label="[4]", style=dashed]; +"2258 Reshape_1804" -> "2259 Transpose_1805" [label="[]", style=solid]; +"2259 Transpose_1805" -> "2264 Unsqueeze_1808" [label="[]", style=solid]; +"2259 Transpose_1805" -> "2296 MatMul_1834" [label="[]", style=solid]; +"2260 Transpose_1806" -> "2262 Unsqueeze_1807" [label="[]", style=solid]; +"2261 Constant_nncf_2089" -> "2262 Unsqueeze_1807" [label="[1]", style=dashed]; +"2262 Unsqueeze_1807" -> "2265 Concat_1809" [label="[]", style=solid]; +"2263 Constant_nncf_2091" -> "2264 Unsqueeze_1808" [label="[1]", style=dashed]; +"2264 Unsqueeze_1808" -> "2265 Concat_1809" [label="[]", style=solid]; +"2265 Concat_1809" -> "3240 nncf_model_output_9" [label="[2, 1, 12, 8, 64]", style=solid]; +"2266 MatMul_1810" -> "2268 Div_1812" [label="[]", style=solid]; +"2267 Constant_1811" -> "2268 Div_1812" [label="[]", style=solid]; +"2268 Div_1812" -> "2269 Shape_1813" [label="[]", style=solid]; +"2268 Div_1812" -> "2272 Shape_1816" [label="[]", style=solid]; +"2268 Div_1812" -> "2286 Mul_1827" [label="[]", style=solid]; +"2269 Shape_1813" -> "2271 Gather_1815" [label="[-1]", style=dashed]; +"2270 Constant_1814" -> "2271 Gather_1815" [label="[]", style=dashed]; +"2271 Gather_1815" -> "2275 Sub_1819" [label="[]", style=dashed]; +"2272 Shape_1816" -> "2274 Gather_1818" [label="[-1]", style=dashed]; +"2273 Constant_1817" -> "2274 Gather_1818" [label="[]", style=dashed]; +"2274 Gather_1818" -> "2275 Sub_1819" [label="[]", style=dashed]; +"2274 Gather_1818" -> "2279 Unsqueeze_1821" [label="[]", style=dashed]; +"2274 Gather_1818" -> "2283 Unsqueeze_1824" [label="[]", style=dashed]; +"2275 Sub_1819" -> "2277 Unsqueeze_1820" [label="[]", style=dashed]; +"2276 Constant_nncf_2104" -> "2277 Unsqueeze_1820" [label="[1]", style=dashed]; +"2277 Unsqueeze_1820" -> "2281 Slice_1823" [label="[1]", style=dashed]; +"2278 Constant_nncf_2106" -> "2279 Unsqueeze_1821" [label="[1]", style=dashed]; +"2279 Unsqueeze_1821" -> "2281 Slice_1823" [label="[1]", style=dashed]; +"2280 Constant_1822" -> "2281 Slice_1823" [label="[1]", style=dashed]; +"2281 Slice_1823" -> "2285 Slice_1826" [label="[-1, -1, -1, -1]", style=solid]; +"2282 Constant_nncf_2110" -> "2283 Unsqueeze_1824" [label="[1]", style=dashed]; +"2283 Unsqueeze_1824" -> "2285 Slice_1826" [label="[1]", style=dashed]; +"2284 Constant_1825" -> "2285 Slice_1826" [label="[1]", style=dashed]; +"2285 Slice_1826" -> "2286 Mul_1827" [label="[-1, -1, -1, -1]", style=solid]; +"2285 Slice_1826" -> "2288 Sub_1829" [label="[-1, -1, -1, -1]", style=solid]; +"2286 Mul_1827" -> "2291 Sub_1832" [label="[]", style=solid]; +"2287 Constant_1828" -> "2288 Sub_1829" [label="[]", style=solid]; +"2288 Sub_1829" -> "2290 Mul_1831" [label="[-1, -1, 
-1, -1]", style=solid]; +"2289 Constant_1830" -> "2290 Mul_1831" [label="[]", style=solid]; +"2290 Mul_1831" -> "2291 Sub_1832" [label="[-1, -1, -1, -1]", style=solid]; +"2291 Sub_1832" -> "2292 Shape_nncf_2120" [label="[]", style=solid]; +"2291 Sub_1832" -> "2293 Flatten_nncf_2121" [label="[]", style=solid]; +"2292 Shape_nncf_2120" -> "2295 Reshape_nncf_2123" [label="[-1]", style=dashed]; +"2293 Flatten_nncf_2121" -> "2294 Softmax_1833" [label="[]", style=solid]; +"2294 Softmax_1833" -> "2295 Reshape_nncf_2123" [label="[]", style=solid]; +"2295 Reshape_nncf_2123" -> "2296 MatMul_1834" [label="[]", style=solid]; +"2296 MatMul_1834" -> "2297 QuantizeLinear_2261_1" [label="[]", style=solid]; +"2297 QuantizeLinear_2261_1" -> "2298 DequantizeLinear_2261_1" [label="[]", style=dashed]; +"2298 DequantizeLinear_2261_1" -> "2299 Transpose_1835" [label="[]", style=solid]; +"2299 Transpose_1835" -> "2300 Shape_1836" [label="[]", style=solid]; +"2299 Transpose_1835" -> "2303 Shape_1839" [label="[]", style=solid]; +"2299 Transpose_1835" -> "2306 Shape_1842" [label="[]", style=solid]; +"2299 Transpose_1835" -> "2309 Shape_1845" [label="[]", style=solid]; +"2299 Transpose_1835" -> "2320 Reshape_1853" [label="[]", style=solid]; +"2300 Shape_1836" -> "2302 Gather_1838" [label="[-1]", style=dashed]; +"2301 Constant_1837" -> "2302 Gather_1838" [label="[]", style=dashed]; +"2302 Gather_1838" -> "2314 Unsqueeze_1849" [label="[]", style=dashed]; +"2303 Shape_1839" -> "2305 Gather_1841" [label="[-1]", style=dashed]; +"2304 Constant_1840" -> "2305 Gather_1841" [label="[]", style=dashed]; +"2305 Gather_1841" -> "2316 Unsqueeze_1850" [label="[]", style=dashed]; +"2306 Shape_1842" -> "2308 Gather_1844" [label="[-1]", style=dashed]; +"2307 Constant_1843" -> "2308 Gather_1844" [label="[]", style=dashed]; +"2308 Gather_1844" -> "2312 Mul_1848" [label="[]", style=dashed]; +"2309 Shape_1845" -> "2311 Gather_1847" [label="[-1]", style=dashed]; +"2310 Constant_1846" -> "2311 Gather_1847" [label="[]", style=dashed]; +"2311 Gather_1847" -> "2312 Mul_1848" [label="[]", style=dashed]; +"2312 Mul_1848" -> "2318 Unsqueeze_1851" [label="[]", style=dashed]; +"2313 Constant_nncf_2139" -> "2314 Unsqueeze_1849" [label="[1]", style=dashed]; +"2314 Unsqueeze_1849" -> "2319 Concat_1852" [label="[1]", style=dashed]; +"2315 Constant_nncf_2141" -> "2316 Unsqueeze_1850" [label="[1]", style=dashed]; +"2316 Unsqueeze_1850" -> "2319 Concat_1852" [label="[1]", style=dashed]; +"2317 Constant_nncf_2143" -> "2318 Unsqueeze_1851" [label="[1]", style=dashed]; +"2318 Unsqueeze_1851" -> "2319 Concat_1852" [label="[1]", style=dashed]; +"2319 Concat_1852" -> "2320 Reshape_1853" [label="[3]", style=dashed]; +"2320 Reshape_1853" -> "2321 Shape_1854" [label="[]", style=solid]; +"2320 Reshape_1853" -> "2324 Shape_1857" [label="[]", style=solid]; +"2320 Reshape_1853" -> "2327 Shape_1860" [label="[]", style=solid]; +"2320 Reshape_1853" -> "2333 Reshape_1865" [label="[]", style=solid]; +"2321 Shape_1854" -> "2323 Gather_1856" [label="[-1]", style=dashed]; +"2322 Constant_1855" -> "2323 Gather_1856" [label="[]", style=dashed]; +"2323 Gather_1856" -> "2338 Unsqueeze_1867" [label="[]", style=dashed]; +"2324 Shape_1857" -> "2326 Gather_1859" [label="[-1]", style=dashed]; +"2325 Constant_1858" -> "2326 Gather_1859" [label="[]", style=dashed]; +"2326 Gather_1859" -> "2340 Unsqueeze_1868" [label="[]", style=dashed]; +"2327 Shape_1860" -> "2329 Gather_1862" [label="[-1]", style=dashed]; +"2328 Constant_1861" -> "2329 Gather_1862" [label="[]", style=dashed]; +"2329 
Gather_1862" -> "2331 Unsqueeze_1863" [label="[]", style=dashed]; +"2330 Constant_nncf_2156" -> "2331 Unsqueeze_1863" [label="[1]", style=dashed]; +"2331 Unsqueeze_1863" -> "2332 Concat_1864" [label="[1]", style=dashed]; +"2332 Concat_1864" -> "2333 Reshape_1865" [label="[2]", style=dashed]; +"2333 Reshape_1865" -> "2336 Gemm_1866" [label="[]", style=solid]; +"2334 QuantizeLinear_h.8.attn.c_proj.weight_1" -> "2335 DequantizeLinear_h.8.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"2335 DequantizeLinear_h.8.attn.c_proj.weight_1" -> "2336 Gemm_1866" [label="[768, 768]", style=solid]; +"2336 Gemm_1866" -> "2342 Reshape_1870" [label="[]", style=solid]; +"2337 Constant_nncf_2161" -> "2338 Unsqueeze_1867" [label="[1]", style=dashed]; +"2338 Unsqueeze_1867" -> "2341 Concat_1869" [label="[1]", style=dashed]; +"2339 Constant_nncf_2163" -> "2340 Unsqueeze_1868" [label="[1]", style=dashed]; +"2340 Unsqueeze_1868" -> "2341 Concat_1869" [label="[1]", style=dashed]; +"2341 Concat_1869" -> "2342 Reshape_1870" [label="[3]", style=dashed]; +"2342 Reshape_1870" -> "2343 Add_1871" [label="[]", style=solid]; +"2343 Add_1871" -> "2344 ReduceMean_1872" [label="[]", style=solid]; +"2343 Add_1871" -> "2345 Sub_1873" [label="[]", style=solid]; +"2343 Add_1871" -> "2416 Add_1930" [label="[]", style=solid]; +"2344 ReduceMean_1872" -> "2345 Sub_1873" [label="[]", style=solid]; +"2345 Sub_1873" -> "2347 Pow_1875" [label="[]", style=solid]; +"2345 Sub_1873" -> "2352 Div_1880" [label="[]", style=solid]; +"2346 Constant_1874" -> "2347 Pow_1875" [label="[]", style=solid]; +"2347 Pow_1875" -> "2348 ReduceMean_1876" [label="[]", style=solid]; +"2348 ReduceMean_1876" -> "2350 Add_1878" [label="[]", style=solid]; +"2349 Constant_1877" -> "2350 Add_1878" [label="[]", style=solid]; +"2350 Add_1878" -> "2351 Sqrt_1879" [label="[]", style=solid]; +"2351 Sqrt_1879" -> "2352 Div_1880" [label="[]", style=solid]; +"2352 Div_1880" -> "2353 Mul_1881" [label="[]", style=solid]; +"2353 Mul_1881" -> "2354 Add_1882" [label="[]", style=solid]; +"2354 Add_1882" -> "2355 QuantizeLinear_2313_1" [label="[]", style=solid]; +"2355 QuantizeLinear_2313_1" -> "2356 DequantizeLinear_2313_1" [label="[]", style=dashed]; +"2356 DequantizeLinear_2313_1" -> "2357 Shape_1883" [label="[]", style=solid]; +"2356 DequantizeLinear_2313_1" -> "2360 Shape_1886" [label="[]", style=solid]; +"2356 DequantizeLinear_2313_1" -> "2363 Shape_1889" [label="[]", style=solid]; +"2356 DequantizeLinear_2313_1" -> "2369 Reshape_1894" [label="[]", style=solid]; +"2357 Shape_1883" -> "2359 Gather_1885" [label="[-1]", style=dashed]; +"2358 Constant_1884" -> "2359 Gather_1885" [label="[]", style=dashed]; +"2359 Gather_1885" -> "2374 Unsqueeze_1896" [label="[]", style=dashed]; +"2360 Shape_1886" -> "2362 Gather_1888" [label="[-1]", style=dashed]; +"2361 Constant_1887" -> "2362 Gather_1888" [label="[]", style=dashed]; +"2362 Gather_1888" -> "2376 Unsqueeze_1897" [label="[]", style=dashed]; +"2363 Shape_1889" -> "2365 Gather_1891" [label="[-1]", style=dashed]; +"2364 Constant_1890" -> "2365 Gather_1891" [label="[]", style=dashed]; +"2365 Gather_1891" -> "2367 Unsqueeze_1892" [label="[]", style=dashed]; +"2366 Constant_nncf_2188" -> "2367 Unsqueeze_1892" [label="[1]", style=dashed]; +"2367 Unsqueeze_1892" -> "2368 Concat_1893" [label="[1]", style=dashed]; +"2368 Concat_1893" -> "2369 Reshape_1894" [label="[2]", style=dashed]; +"2369 Reshape_1894" -> "2372 Gemm_1895" [label="[]", style=solid]; +"2370 QuantizeLinear_h.8.mlp.c_fc.weight_1" -> "2371 
DequantizeLinear_h.8.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"2371 DequantizeLinear_h.8.mlp.c_fc.weight_1" -> "2372 Gemm_1895" [label="[768, 3072]", style=solid]; +"2372 Gemm_1895" -> "2378 Reshape_1899" [label="[]", style=solid]; +"2373 Constant_nncf_2193" -> "2374 Unsqueeze_1896" [label="[1]", style=dashed]; +"2374 Unsqueeze_1896" -> "2377 Concat_1898" [label="[1]", style=dashed]; +"2375 Constant_nncf_2195" -> "2376 Unsqueeze_1897" [label="[1]", style=dashed]; +"2376 Unsqueeze_1897" -> "2377 Concat_1898" [label="[1]", style=dashed]; +"2377 Concat_1898" -> "2378 Reshape_1899" [label="[3]", style=dashed]; +"2378 Reshape_1899" -> "2380 Mul_1901" [label="[]", style=solid]; +"2378 Reshape_1899" -> "2382 Pow_1903" [label="[]", style=solid]; +"2378 Reshape_1899" -> "2385 Add_1906" [label="[]", style=solid]; +"2379 Constant_1900" -> "2380 Mul_1901" [label="[]", style=solid]; +"2380 Mul_1901" -> "2391 Mul_1912" [label="[]", style=solid]; +"2381 Constant_1902" -> "2382 Pow_1903" [label="[]", style=solid]; +"2382 Pow_1903" -> "2384 Mul_1905" [label="[]", style=solid]; +"2383 Constant_1904" -> "2384 Mul_1905" [label="[]", style=solid]; +"2384 Mul_1905" -> "2385 Add_1906" [label="[]", style=solid]; +"2385 Add_1906" -> "2387 Mul_1908" [label="[]", style=solid]; +"2386 Constant_1907" -> "2387 Mul_1908" [label="[]", style=solid]; +"2387 Mul_1908" -> "2388 Tanh_1909" [label="[]", style=solid]; +"2388 Tanh_1909" -> "2390 Add_1911" [label="[]", style=solid]; +"2389 Constant_1910" -> "2390 Add_1911" [label="[]", style=solid]; +"2390 Add_1911" -> "2391 Mul_1912" [label="[]", style=solid]; +"2391 Mul_1912" -> "2392 QuantizeLinear_2347_1" [label="[]", style=solid]; +"2392 QuantizeLinear_2347_1" -> "2393 DequantizeLinear_2347_1" [label="[]", style=dashed]; +"2393 DequantizeLinear_2347_1" -> "2394 Shape_1913" [label="[]", style=solid]; +"2393 DequantizeLinear_2347_1" -> "2397 Shape_1916" [label="[]", style=solid]; +"2393 DequantizeLinear_2347_1" -> "2400 Shape_1919" [label="[]", style=solid]; +"2393 DequantizeLinear_2347_1" -> "2406 Reshape_1924" [label="[]", style=solid]; +"2394 Shape_1913" -> "2396 Gather_1915" [label="[-1]", style=dashed]; +"2395 Constant_1914" -> "2396 Gather_1915" [label="[]", style=dashed]; +"2396 Gather_1915" -> "2411 Unsqueeze_1926" [label="[]", style=dashed]; +"2397 Shape_1916" -> "2399 Gather_1918" [label="[-1]", style=dashed]; +"2398 Constant_1917" -> "2399 Gather_1918" [label="[]", style=dashed]; +"2399 Gather_1918" -> "2413 Unsqueeze_1927" [label="[]", style=dashed]; +"2400 Shape_1919" -> "2402 Gather_1921" [label="[-1]", style=dashed]; +"2401 Constant_1920" -> "2402 Gather_1921" [label="[]", style=dashed]; +"2402 Gather_1921" -> "2404 Unsqueeze_1922" [label="[]", style=dashed]; +"2403 Constant_nncf_2221" -> "2404 Unsqueeze_1922" [label="[1]", style=dashed]; +"2404 Unsqueeze_1922" -> "2405 Concat_1923" [label="[1]", style=dashed]; +"2405 Concat_1923" -> "2406 Reshape_1924" [label="[2]", style=dashed]; +"2406 Reshape_1924" -> "2409 Gemm_1925" [label="[]", style=solid]; +"2407 QuantizeLinear_h.8.mlp.c_proj.weight_1" -> "2408 DequantizeLinear_h.8.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"2408 DequantizeLinear_h.8.mlp.c_proj.weight_1" -> "2409 Gemm_1925" [label="[3072, 768]", style=solid]; +"2409 Gemm_1925" -> "2415 Reshape_1929" [label="[]", style=solid]; +"2410 Constant_nncf_2226" -> "2411 Unsqueeze_1926" [label="[1]", style=dashed]; +"2411 Unsqueeze_1926" -> "2414 Concat_1928" [label="[1]", style=dashed]; +"2412 Constant_nncf_2228" -> "2413 
Unsqueeze_1927" [label="[1]", style=dashed]; +"2413 Unsqueeze_1927" -> "2414 Concat_1928" [label="[1]", style=dashed]; +"2414 Concat_1928" -> "2415 Reshape_1929" [label="[3]", style=dashed]; +"2415 Reshape_1929" -> "2416 Add_1930" [label="[]", style=solid]; +"2416 Add_1930" -> "2417 ReduceMean_1931" [label="[]", style=solid]; +"2416 Add_1930" -> "2418 Sub_1932" [label="[]", style=solid]; +"2416 Add_1930" -> "2607 Add_2082" [label="[]", style=solid]; +"2417 ReduceMean_1931" -> "2418 Sub_1932" [label="[]", style=solid]; +"2418 Sub_1932" -> "2420 Pow_1934" [label="[]", style=solid]; +"2418 Sub_1932" -> "2425 Div_1939" [label="[]", style=solid]; +"2419 Constant_1933" -> "2420 Pow_1934" [label="[]", style=solid]; +"2420 Pow_1934" -> "2421 ReduceMean_1935" [label="[]", style=solid]; +"2421 ReduceMean_1935" -> "2423 Add_1937" [label="[]", style=solid]; +"2422 Constant_1936" -> "2423 Add_1937" [label="[]", style=solid]; +"2423 Add_1937" -> "2424 Sqrt_1938" [label="[]", style=solid]; +"2424 Sqrt_1938" -> "2425 Div_1939" [label="[]", style=solid]; +"2425 Div_1939" -> "2426 Mul_1940" [label="[]", style=solid]; +"2426 Mul_1940" -> "2427 Add_1941" [label="[]", style=solid]; +"2427 Add_1941" -> "2428 QuantizeLinear_2380_1" [label="[]", style=solid]; +"2428 QuantizeLinear_2380_1" -> "2429 DequantizeLinear_2380_1" [label="[]", style=dashed]; +"2429 DequantizeLinear_2380_1" -> "2430 Shape_1942" [label="[]", style=solid]; +"2429 DequantizeLinear_2380_1" -> "2433 Shape_1945" [label="[]", style=solid]; +"2429 DequantizeLinear_2380_1" -> "2436 Shape_1948" [label="[]", style=solid]; +"2429 DequantizeLinear_2380_1" -> "2442 Reshape_1953" [label="[]", style=solid]; +"2430 Shape_1942" -> "2432 Gather_1944" [label="[-1]", style=dashed]; +"2431 Constant_1943" -> "2432 Gather_1944" [label="[]", style=dashed]; +"2432 Gather_1944" -> "2447 Unsqueeze_1955" [label="[]", style=dashed]; +"2433 Shape_1945" -> "2435 Gather_1947" [label="[-1]", style=dashed]; +"2434 Constant_1946" -> "2435 Gather_1947" [label="[]", style=dashed]; +"2435 Gather_1947" -> "2449 Unsqueeze_1956" [label="[]", style=dashed]; +"2436 Shape_1948" -> "2438 Gather_1950" [label="[-1]", style=dashed]; +"2437 Constant_1949" -> "2438 Gather_1950" [label="[]", style=dashed]; +"2438 Gather_1950" -> "2440 Unsqueeze_1951" [label="[]", style=dashed]; +"2439 Constant_nncf_2253" -> "2440 Unsqueeze_1951" [label="[1]", style=dashed]; +"2440 Unsqueeze_1951" -> "2441 Concat_1952" [label="[1]", style=dashed]; +"2441 Concat_1952" -> "2442 Reshape_1953" [label="[2]", style=dashed]; +"2442 Reshape_1953" -> "2445 Gemm_1954" [label="[]", style=solid]; +"2443 QuantizeLinear_h.9.attn.c_attn.weight_1" -> "2444 DequantizeLinear_h.9.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"2444 DequantizeLinear_h.9.attn.c_attn.weight_1" -> "2445 Gemm_1954" [label="[768, 2304]", style=solid]; +"2445 Gemm_1954" -> "2451 Reshape_1958" [label="[]", style=solid]; +"2446 Constant_nncf_2258" -> "2447 Unsqueeze_1955" [label="[1]", style=dashed]; +"2447 Unsqueeze_1955" -> "2450 Concat_1957" [label="[1]", style=dashed]; +"2448 Constant_nncf_2260" -> "2449 Unsqueeze_1956" [label="[1]", style=dashed]; +"2449 Unsqueeze_1956" -> "2450 Concat_1957" [label="[1]", style=dashed]; +"2450 Concat_1957" -> "2451 Reshape_1958" [label="[3]", style=dashed]; +"2451 Reshape_1958" -> "2453 Split_1959" [label="[]", style=solid]; +"2452 Constant_nncf_2264" -> "2453 Split_1959" [label="[3]", style=dashed]; +"2453 Split_1959" -> "2454 QuantizeLinear_query.19_1" [label="[]", style=solid]; +"2453 Split_1959" 
-> "2456 Shape_1960" [label="[]", style=solid]; +"2453 Split_1959" -> "2459 Shape_1963" [label="[]", style=solid]; +"2453 Split_1959" -> "2462 Shape_1966" [label="[]", style=solid]; +"2453 Split_1959" -> "2478 Shape_1979" [label="[]", style=solid]; +"2453 Split_1959" -> "2481 Shape_1982" [label="[]", style=solid]; +"2453 Split_1959" -> "2484 Shape_1985" [label="[]", style=solid]; +"2453 Split_1959" -> "2498 Reshape_1996" [label="[]", style=solid]; +"2453 Split_1959" -> "2502 Shape_1998" [label="[]", style=solid]; +"2453 Split_1959" -> "2505 Shape_2001" [label="[]", style=solid]; +"2453 Split_1959" -> "2508 Shape_2004" [label="[]", style=solid]; +"2453 Split_1959" -> "2522 Reshape_2015" [label="[]", style=solid]; +"2454 QuantizeLinear_query.19_1" -> "2455 DequantizeLinear_query.19_1" [label="[]", style=dashed]; +"2455 DequantizeLinear_query.19_1" -> "2476 Reshape_1977" [label="[]", style=solid]; +"2456 Shape_1960" -> "2458 Gather_1962" [label="[-1]", style=dashed]; +"2457 Constant_1961" -> "2458 Gather_1962" [label="[]", style=dashed]; +"2458 Gather_1962" -> "2470 Unsqueeze_1973" [label="[]", style=dashed]; +"2459 Shape_1963" -> "2461 Gather_1965" [label="[-1]", style=dashed]; +"2460 Constant_1964" -> "2461 Gather_1965" [label="[]", style=dashed]; +"2461 Gather_1965" -> "2472 Unsqueeze_1974" [label="[]", style=dashed]; +"2462 Shape_1966" -> "2464 Gather_1968" [label="[-1]", style=dashed]; +"2463 Constant_1967" -> "2464 Gather_1968" [label="[]", style=dashed]; +"2464 Gather_1968" -> "2466 Div_1970" [label="[]", style=dashed]; +"2465 Constant_1969" -> "2466 Div_1970" [label="[]", style=dashed]; +"2466 Div_1970" -> "2467 Cast_1971" [label="[]", style=dashed]; +"2467 Cast_1971" -> "2468 Cast_1972" [label="[]", style=dashed]; +"2468 Cast_1972" -> "2474 Unsqueeze_1975" [label="[]", style=dashed]; +"2469 Constant_nncf_2279" -> "2470 Unsqueeze_1973" [label="[1]", style=dashed]; +"2470 Unsqueeze_1973" -> "2475 Concat_1976" [label="[1]", style=dashed]; +"2471 Constant_nncf_2281" -> "2472 Unsqueeze_1974" [label="[1]", style=dashed]; +"2472 Unsqueeze_1974" -> "2475 Concat_1976" [label="[1]", style=dashed]; +"2473 Constant_nncf_2283" -> "2474 Unsqueeze_1975" [label="[1]", style=dashed]; +"2474 Unsqueeze_1975" -> "2475 Concat_1976" [label="[1]", style=dashed]; +"2475 Concat_1976" -> "2476 Reshape_1977" [label="[4]", style=dashed]; +"2476 Reshape_1977" -> "2477 Transpose_1978" [label="[]", style=solid]; +"2477 Transpose_1978" -> "2530 MatMul_2021" [label="[]", style=solid]; +"2478 Shape_1979" -> "2480 Gather_1981" [label="[-1]", style=dashed]; +"2479 Constant_1980" -> "2480 Gather_1981" [label="[]", style=dashed]; +"2480 Gather_1981" -> "2492 Unsqueeze_1992" [label="[]", style=dashed]; +"2481 Shape_1982" -> "2483 Gather_1984" [label="[-1]", style=dashed]; +"2482 Constant_1983" -> "2483 Gather_1984" [label="[]", style=dashed]; +"2483 Gather_1984" -> "2494 Unsqueeze_1993" [label="[]", style=dashed]; +"2484 Shape_1985" -> "2486 Gather_1987" [label="[-1]", style=dashed]; +"2485 Constant_1986" -> "2486 Gather_1987" [label="[]", style=dashed]; +"2486 Gather_1987" -> "2488 Div_1989" [label="[]", style=dashed]; +"2487 Constant_1988" -> "2488 Div_1989" [label="[]", style=dashed]; +"2488 Div_1989" -> "2489 Cast_1990" [label="[]", style=dashed]; +"2489 Cast_1990" -> "2490 Cast_1991" [label="[]", style=dashed]; +"2490 Cast_1991" -> "2496 Unsqueeze_1994" [label="[]", style=dashed]; +"2491 Constant_nncf_2301" -> "2492 Unsqueeze_1992" [label="[1]", style=dashed]; +"2492 Unsqueeze_1992" -> "2497 Concat_1995" 
[label="[1]", style=dashed]; +"2493 Constant_nncf_2303" -> "2494 Unsqueeze_1993" [label="[1]", style=dashed]; +"2494 Unsqueeze_1993" -> "2497 Concat_1995" [label="[1]", style=dashed]; +"2495 Constant_nncf_2305" -> "2496 Unsqueeze_1994" [label="[1]", style=dashed]; +"2496 Unsqueeze_1994" -> "2497 Concat_1995" [label="[1]", style=dashed]; +"2497 Concat_1995" -> "2498 Reshape_1996" [label="[4]", style=dashed]; +"2498 Reshape_1996" -> "2499 QuantizeLinear_2445_1" [label="[]", style=solid]; +"2498 Reshape_1996" -> "2524 Transpose_2017" [label="[]", style=solid]; +"2499 QuantizeLinear_2445_1" -> "2500 DequantizeLinear_2445_1" [label="[]", style=dashed]; +"2500 DequantizeLinear_2445_1" -> "2501 Transpose_1997" [label="[]", style=solid]; +"2501 Transpose_1997" -> "2530 MatMul_2021" [label="[]", style=solid]; +"2502 Shape_1998" -> "2504 Gather_2000" [label="[-1]", style=dashed]; +"2503 Constant_1999" -> "2504 Gather_2000" [label="[]", style=dashed]; +"2504 Gather_2000" -> "2516 Unsqueeze_2011" [label="[]", style=dashed]; +"2505 Shape_2001" -> "2507 Gather_2003" [label="[-1]", style=dashed]; +"2506 Constant_2002" -> "2507 Gather_2003" [label="[]", style=dashed]; +"2507 Gather_2003" -> "2518 Unsqueeze_2012" [label="[]", style=dashed]; +"2508 Shape_2004" -> "2510 Gather_2006" [label="[-1]", style=dashed]; +"2509 Constant_2005" -> "2510 Gather_2006" [label="[]", style=dashed]; +"2510 Gather_2006" -> "2512 Div_2008" [label="[]", style=dashed]; +"2511 Constant_2007" -> "2512 Div_2008" [label="[]", style=dashed]; +"2512 Div_2008" -> "2513 Cast_2009" [label="[]", style=dashed]; +"2513 Cast_2009" -> "2514 Cast_2010" [label="[]", style=dashed]; +"2514 Cast_2010" -> "2520 Unsqueeze_2013" [label="[]", style=dashed]; +"2515 Constant_nncf_2323" -> "2516 Unsqueeze_2011" [label="[1]", style=dashed]; +"2516 Unsqueeze_2011" -> "2521 Concat_2014" [label="[1]", style=dashed]; +"2517 Constant_nncf_2325" -> "2518 Unsqueeze_2012" [label="[1]", style=dashed]; +"2518 Unsqueeze_2012" -> "2521 Concat_2014" [label="[1]", style=dashed]; +"2519 Constant_nncf_2327" -> "2520 Unsqueeze_2013" [label="[1]", style=dashed]; +"2520 Unsqueeze_2013" -> "2521 Concat_2014" [label="[1]", style=dashed]; +"2521 Concat_2014" -> "2522 Reshape_2015" [label="[4]", style=dashed]; +"2522 Reshape_2015" -> "2523 Transpose_2016" [label="[]", style=solid]; +"2523 Transpose_2016" -> "2528 Unsqueeze_2019" [label="[]", style=solid]; +"2523 Transpose_2016" -> "2560 MatMul_2045" [label="[]", style=solid]; +"2524 Transpose_2017" -> "2526 Unsqueeze_2018" [label="[]", style=solid]; +"2525 Constant_nncf_2333" -> "2526 Unsqueeze_2018" [label="[1]", style=dashed]; +"2526 Unsqueeze_2018" -> "2529 Concat_2020" [label="[]", style=solid]; +"2527 Constant_nncf_2335" -> "2528 Unsqueeze_2019" [label="[1]", style=dashed]; +"2528 Unsqueeze_2019" -> "2529 Concat_2020" [label="[]", style=solid]; +"2529 Concat_2020" -> "3241 nncf_model_output_10" [label="[2, 1, 12, 8, 64]", style=solid]; +"2530 MatMul_2021" -> "2532 Div_2023" [label="[]", style=solid]; +"2531 Constant_2022" -> "2532 Div_2023" [label="[]", style=solid]; +"2532 Div_2023" -> "2533 Shape_2024" [label="[]", style=solid]; +"2532 Div_2023" -> "2536 Shape_2027" [label="[]", style=solid]; +"2532 Div_2023" -> "2550 Mul_2038" [label="[]", style=solid]; +"2533 Shape_2024" -> "2535 Gather_2026" [label="[-1]", style=dashed]; +"2534 Constant_2025" -> "2535 Gather_2026" [label="[]", style=dashed]; +"2535 Gather_2026" -> "2539 Sub_2030" [label="[]", style=dashed]; +"2536 Shape_2027" -> "2538 Gather_2029" [label="[-1]", 
style=dashed]; +"2537 Constant_2028" -> "2538 Gather_2029" [label="[]", style=dashed]; +"2538 Gather_2029" -> "2539 Sub_2030" [label="[]", style=dashed]; +"2538 Gather_2029" -> "2543 Unsqueeze_2032" [label="[]", style=dashed]; +"2538 Gather_2029" -> "2547 Unsqueeze_2035" [label="[]", style=dashed]; +"2539 Sub_2030" -> "2541 Unsqueeze_2031" [label="[]", style=dashed]; +"2540 Constant_nncf_2348" -> "2541 Unsqueeze_2031" [label="[1]", style=dashed]; +"2541 Unsqueeze_2031" -> "2545 Slice_2034" [label="[1]", style=dashed]; +"2542 Constant_nncf_2350" -> "2543 Unsqueeze_2032" [label="[1]", style=dashed]; +"2543 Unsqueeze_2032" -> "2545 Slice_2034" [label="[1]", style=dashed]; +"2544 Constant_2033" -> "2545 Slice_2034" [label="[1]", style=dashed]; +"2545 Slice_2034" -> "2549 Slice_2037" [label="[-1, -1, -1, -1]", style=solid]; +"2546 Constant_nncf_2354" -> "2547 Unsqueeze_2035" [label="[1]", style=dashed]; +"2547 Unsqueeze_2035" -> "2549 Slice_2037" [label="[1]", style=dashed]; +"2548 Constant_2036" -> "2549 Slice_2037" [label="[1]", style=dashed]; +"2549 Slice_2037" -> "2550 Mul_2038" [label="[-1, -1, -1, -1]", style=solid]; +"2549 Slice_2037" -> "2552 Sub_2040" [label="[-1, -1, -1, -1]", style=solid]; +"2550 Mul_2038" -> "2555 Sub_2043" [label="[]", style=solid]; +"2551 Constant_2039" -> "2552 Sub_2040" [label="[]", style=solid]; +"2552 Sub_2040" -> "2554 Mul_2042" [label="[-1, -1, -1, -1]", style=solid]; +"2553 Constant_2041" -> "2554 Mul_2042" [label="[]", style=solid]; +"2554 Mul_2042" -> "2555 Sub_2043" [label="[-1, -1, -1, -1]", style=solid]; +"2555 Sub_2043" -> "2556 Shape_nncf_2364" [label="[]", style=solid]; +"2555 Sub_2043" -> "2557 Flatten_nncf_2365" [label="[]", style=solid]; +"2556 Shape_nncf_2364" -> "2559 Reshape_nncf_2367" [label="[-1]", style=dashed]; +"2557 Flatten_nncf_2365" -> "2558 Softmax_2044" [label="[]", style=solid]; +"2558 Softmax_2044" -> "2559 Reshape_nncf_2367" [label="[]", style=solid]; +"2559 Reshape_nncf_2367" -> "2560 MatMul_2045" [label="[]", style=solid]; +"2560 MatMul_2045" -> "2561 QuantizeLinear_2502_1" [label="[]", style=solid]; +"2561 QuantizeLinear_2502_1" -> "2562 DequantizeLinear_2502_1" [label="[]", style=dashed]; +"2562 DequantizeLinear_2502_1" -> "2563 Transpose_2046" [label="[]", style=solid]; +"2563 Transpose_2046" -> "2564 Shape_2047" [label="[]", style=solid]; +"2563 Transpose_2046" -> "2567 Shape_2050" [label="[]", style=solid]; +"2563 Transpose_2046" -> "2570 Shape_2053" [label="[]", style=solid]; +"2563 Transpose_2046" -> "2573 Shape_2056" [label="[]", style=solid]; +"2563 Transpose_2046" -> "2584 Reshape_2064" [label="[]", style=solid]; +"2564 Shape_2047" -> "2566 Gather_2049" [label="[-1]", style=dashed]; +"2565 Constant_2048" -> "2566 Gather_2049" [label="[]", style=dashed]; +"2566 Gather_2049" -> "2578 Unsqueeze_2060" [label="[]", style=dashed]; +"2567 Shape_2050" -> "2569 Gather_2052" [label="[-1]", style=dashed]; +"2568 Constant_2051" -> "2569 Gather_2052" [label="[]", style=dashed]; +"2569 Gather_2052" -> "2580 Unsqueeze_2061" [label="[]", style=dashed]; +"2570 Shape_2053" -> "2572 Gather_2055" [label="[-1]", style=dashed]; +"2571 Constant_2054" -> "2572 Gather_2055" [label="[]", style=dashed]; +"2572 Gather_2055" -> "2576 Mul_2059" [label="[]", style=dashed]; +"2573 Shape_2056" -> "2575 Gather_2058" [label="[-1]", style=dashed]; +"2574 Constant_2057" -> "2575 Gather_2058" [label="[]", style=dashed]; +"2575 Gather_2058" -> "2576 Mul_2059" [label="[]", style=dashed]; +"2576 Mul_2059" -> "2582 Unsqueeze_2062" [label="[]", style=dashed]; 
+"2577 Constant_nncf_2383" -> "2578 Unsqueeze_2060" [label="[1]", style=dashed]; +"2578 Unsqueeze_2060" -> "2583 Concat_2063" [label="[1]", style=dashed]; +"2579 Constant_nncf_2385" -> "2580 Unsqueeze_2061" [label="[1]", style=dashed]; +"2580 Unsqueeze_2061" -> "2583 Concat_2063" [label="[1]", style=dashed]; +"2581 Constant_nncf_2387" -> "2582 Unsqueeze_2062" [label="[1]", style=dashed]; +"2582 Unsqueeze_2062" -> "2583 Concat_2063" [label="[1]", style=dashed]; +"2583 Concat_2063" -> "2584 Reshape_2064" [label="[3]", style=dashed]; +"2584 Reshape_2064" -> "2585 Shape_2065" [label="[]", style=solid]; +"2584 Reshape_2064" -> "2588 Shape_2068" [label="[]", style=solid]; +"2584 Reshape_2064" -> "2591 Shape_2071" [label="[]", style=solid]; +"2584 Reshape_2064" -> "2597 Reshape_2076" [label="[]", style=solid]; +"2585 Shape_2065" -> "2587 Gather_2067" [label="[-1]", style=dashed]; +"2586 Constant_2066" -> "2587 Gather_2067" [label="[]", style=dashed]; +"2587 Gather_2067" -> "2602 Unsqueeze_2078" [label="[]", style=dashed]; +"2588 Shape_2068" -> "2590 Gather_2070" [label="[-1]", style=dashed]; +"2589 Constant_2069" -> "2590 Gather_2070" [label="[]", style=dashed]; +"2590 Gather_2070" -> "2604 Unsqueeze_2079" [label="[]", style=dashed]; +"2591 Shape_2071" -> "2593 Gather_2073" [label="[-1]", style=dashed]; +"2592 Constant_2072" -> "2593 Gather_2073" [label="[]", style=dashed]; +"2593 Gather_2073" -> "2595 Unsqueeze_2074" [label="[]", style=dashed]; +"2594 Constant_nncf_2400" -> "2595 Unsqueeze_2074" [label="[1]", style=dashed]; +"2595 Unsqueeze_2074" -> "2596 Concat_2075" [label="[1]", style=dashed]; +"2596 Concat_2075" -> "2597 Reshape_2076" [label="[2]", style=dashed]; +"2597 Reshape_2076" -> "2600 Gemm_2077" [label="[]", style=solid]; +"2598 QuantizeLinear_h.9.attn.c_proj.weight_1" -> "2599 DequantizeLinear_h.9.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"2599 DequantizeLinear_h.9.attn.c_proj.weight_1" -> "2600 Gemm_2077" [label="[768, 768]", style=solid]; +"2600 Gemm_2077" -> "2606 Reshape_2081" [label="[]", style=solid]; +"2601 Constant_nncf_2405" -> "2602 Unsqueeze_2078" [label="[1]", style=dashed]; +"2602 Unsqueeze_2078" -> "2605 Concat_2080" [label="[1]", style=dashed]; +"2603 Constant_nncf_2407" -> "2604 Unsqueeze_2079" [label="[1]", style=dashed]; +"2604 Unsqueeze_2079" -> "2605 Concat_2080" [label="[1]", style=dashed]; +"2605 Concat_2080" -> "2606 Reshape_2081" [label="[3]", style=dashed]; +"2606 Reshape_2081" -> "2607 Add_2082" [label="[]", style=solid]; +"2607 Add_2082" -> "2608 ReduceMean_2083" [label="[]", style=solid]; +"2607 Add_2082" -> "2609 Sub_2084" [label="[]", style=solid]; +"2607 Add_2082" -> "2680 Add_2141" [label="[]", style=solid]; +"2608 ReduceMean_2083" -> "2609 Sub_2084" [label="[]", style=solid]; +"2609 Sub_2084" -> "2611 Pow_2086" [label="[]", style=solid]; +"2609 Sub_2084" -> "2616 Div_2091" [label="[]", style=solid]; +"2610 Constant_2085" -> "2611 Pow_2086" [label="[]", style=solid]; +"2611 Pow_2086" -> "2612 ReduceMean_2087" [label="[]", style=solid]; +"2612 ReduceMean_2087" -> "2614 Add_2089" [label="[]", style=solid]; +"2613 Constant_2088" -> "2614 Add_2089" [label="[]", style=solid]; +"2614 Add_2089" -> "2615 Sqrt_2090" [label="[]", style=solid]; +"2615 Sqrt_2090" -> "2616 Div_2091" [label="[]", style=solid]; +"2616 Div_2091" -> "2617 Mul_2092" [label="[]", style=solid]; +"2617 Mul_2092" -> "2618 Add_2093" [label="[]", style=solid]; +"2618 Add_2093" -> "2619 QuantizeLinear_2554_1" [label="[]", style=solid]; +"2619 QuantizeLinear_2554_1" -> "2620 
DequantizeLinear_2554_1" [label="[]", style=dashed]; +"2620 DequantizeLinear_2554_1" -> "2621 Shape_2094" [label="[]", style=solid]; +"2620 DequantizeLinear_2554_1" -> "2624 Shape_2097" [label="[]", style=solid]; +"2620 DequantizeLinear_2554_1" -> "2627 Shape_2100" [label="[]", style=solid]; +"2620 DequantizeLinear_2554_1" -> "2633 Reshape_2105" [label="[]", style=solid]; +"2621 Shape_2094" -> "2623 Gather_2096" [label="[-1]", style=dashed]; +"2622 Constant_2095" -> "2623 Gather_2096" [label="[]", style=dashed]; +"2623 Gather_2096" -> "2638 Unsqueeze_2107" [label="[]", style=dashed]; +"2624 Shape_2097" -> "2626 Gather_2099" [label="[-1]", style=dashed]; +"2625 Constant_2098" -> "2626 Gather_2099" [label="[]", style=dashed]; +"2626 Gather_2099" -> "2640 Unsqueeze_2108" [label="[]", style=dashed]; +"2627 Shape_2100" -> "2629 Gather_2102" [label="[-1]", style=dashed]; +"2628 Constant_2101" -> "2629 Gather_2102" [label="[]", style=dashed]; +"2629 Gather_2102" -> "2631 Unsqueeze_2103" [label="[]", style=dashed]; +"2630 Constant_nncf_2432" -> "2631 Unsqueeze_2103" [label="[1]", style=dashed]; +"2631 Unsqueeze_2103" -> "2632 Concat_2104" [label="[1]", style=dashed]; +"2632 Concat_2104" -> "2633 Reshape_2105" [label="[2]", style=dashed]; +"2633 Reshape_2105" -> "2636 Gemm_2106" [label="[]", style=solid]; +"2634 QuantizeLinear_h.9.mlp.c_fc.weight_1" -> "2635 DequantizeLinear_h.9.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"2635 DequantizeLinear_h.9.mlp.c_fc.weight_1" -> "2636 Gemm_2106" [label="[768, 3072]", style=solid]; +"2636 Gemm_2106" -> "2642 Reshape_2110" [label="[]", style=solid]; +"2637 Constant_nncf_2437" -> "2638 Unsqueeze_2107" [label="[1]", style=dashed]; +"2638 Unsqueeze_2107" -> "2641 Concat_2109" [label="[1]", style=dashed]; +"2639 Constant_nncf_2439" -> "2640 Unsqueeze_2108" [label="[1]", style=dashed]; +"2640 Unsqueeze_2108" -> "2641 Concat_2109" [label="[1]", style=dashed]; +"2641 Concat_2109" -> "2642 Reshape_2110" [label="[3]", style=dashed]; +"2642 Reshape_2110" -> "2644 Mul_2112" [label="[]", style=solid]; +"2642 Reshape_2110" -> "2646 Pow_2114" [label="[]", style=solid]; +"2642 Reshape_2110" -> "2649 Add_2117" [label="[]", style=solid]; +"2643 Constant_2111" -> "2644 Mul_2112" [label="[]", style=solid]; +"2644 Mul_2112" -> "2655 Mul_2123" [label="[]", style=solid]; +"2645 Constant_2113" -> "2646 Pow_2114" [label="[]", style=solid]; +"2646 Pow_2114" -> "2648 Mul_2116" [label="[]", style=solid]; +"2647 Constant_2115" -> "2648 Mul_2116" [label="[]", style=solid]; +"2648 Mul_2116" -> "2649 Add_2117" [label="[]", style=solid]; +"2649 Add_2117" -> "2651 Mul_2119" [label="[]", style=solid]; +"2650 Constant_2118" -> "2651 Mul_2119" [label="[]", style=solid]; +"2651 Mul_2119" -> "2652 Tanh_2120" [label="[]", style=solid]; +"2652 Tanh_2120" -> "2654 Add_2122" [label="[]", style=solid]; +"2653 Constant_2121" -> "2654 Add_2122" [label="[]", style=solid]; +"2654 Add_2122" -> "2655 Mul_2123" [label="[]", style=solid]; +"2655 Mul_2123" -> "2656 QuantizeLinear_2588_1" [label="[]", style=solid]; +"2656 QuantizeLinear_2588_1" -> "2657 DequantizeLinear_2588_1" [label="[]", style=dashed]; +"2657 DequantizeLinear_2588_1" -> "2658 Shape_2124" [label="[]", style=solid]; +"2657 DequantizeLinear_2588_1" -> "2661 Shape_2127" [label="[]", style=solid]; +"2657 DequantizeLinear_2588_1" -> "2664 Shape_2130" [label="[]", style=solid]; +"2657 DequantizeLinear_2588_1" -> "2670 Reshape_2135" [label="[]", style=solid]; +"2658 Shape_2124" -> "2660 Gather_2126" [label="[-1]", style=dashed]; +"2659 
Constant_2125" -> "2660 Gather_2126" [label="[]", style=dashed]; +"2660 Gather_2126" -> "2675 Unsqueeze_2137" [label="[]", style=dashed]; +"2661 Shape_2127" -> "2663 Gather_2129" [label="[-1]", style=dashed]; +"2662 Constant_2128" -> "2663 Gather_2129" [label="[]", style=dashed]; +"2663 Gather_2129" -> "2677 Unsqueeze_2138" [label="[]", style=dashed]; +"2664 Shape_2130" -> "2666 Gather_2132" [label="[-1]", style=dashed]; +"2665 Constant_2131" -> "2666 Gather_2132" [label="[]", style=dashed]; +"2666 Gather_2132" -> "2668 Unsqueeze_2133" [label="[]", style=dashed]; +"2667 Constant_nncf_2465" -> "2668 Unsqueeze_2133" [label="[1]", style=dashed]; +"2668 Unsqueeze_2133" -> "2669 Concat_2134" [label="[1]", style=dashed]; +"2669 Concat_2134" -> "2670 Reshape_2135" [label="[2]", style=dashed]; +"2670 Reshape_2135" -> "2673 Gemm_2136" [label="[]", style=solid]; +"2671 QuantizeLinear_h.9.mlp.c_proj.weight_1" -> "2672 DequantizeLinear_h.9.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"2672 DequantizeLinear_h.9.mlp.c_proj.weight_1" -> "2673 Gemm_2136" [label="[3072, 768]", style=solid]; +"2673 Gemm_2136" -> "2679 Reshape_2140" [label="[]", style=solid]; +"2674 Constant_nncf_2470" -> "2675 Unsqueeze_2137" [label="[1]", style=dashed]; +"2675 Unsqueeze_2137" -> "2678 Concat_2139" [label="[1]", style=dashed]; +"2676 Constant_nncf_2472" -> "2677 Unsqueeze_2138" [label="[1]", style=dashed]; +"2677 Unsqueeze_2138" -> "2678 Concat_2139" [label="[1]", style=dashed]; +"2678 Concat_2139" -> "2679 Reshape_2140" [label="[3]", style=dashed]; +"2679 Reshape_2140" -> "2680 Add_2141" [label="[]", style=solid]; +"2680 Add_2141" -> "2681 ReduceMean_2142" [label="[]", style=solid]; +"2680 Add_2141" -> "2682 Sub_2143" [label="[]", style=solid]; +"2680 Add_2141" -> "2871 Add_2293" [label="[]", style=solid]; +"2681 ReduceMean_2142" -> "2682 Sub_2143" [label="[]", style=solid]; +"2682 Sub_2143" -> "2684 Pow_2145" [label="[]", style=solid]; +"2682 Sub_2143" -> "2689 Div_2150" [label="[]", style=solid]; +"2683 Constant_2144" -> "2684 Pow_2145" [label="[]", style=solid]; +"2684 Pow_2145" -> "2685 ReduceMean_2146" [label="[]", style=solid]; +"2685 ReduceMean_2146" -> "2687 Add_2148" [label="[]", style=solid]; +"2686 Constant_2147" -> "2687 Add_2148" [label="[]", style=solid]; +"2687 Add_2148" -> "2688 Sqrt_2149" [label="[]", style=solid]; +"2688 Sqrt_2149" -> "2689 Div_2150" [label="[]", style=solid]; +"2689 Div_2150" -> "2690 Mul_2151" [label="[]", style=solid]; +"2690 Mul_2151" -> "2691 Add_2152" [label="[]", style=solid]; +"2691 Add_2152" -> "2692 QuantizeLinear_2621_1" [label="[]", style=solid]; +"2692 QuantizeLinear_2621_1" -> "2693 DequantizeLinear_2621_1" [label="[]", style=dashed]; +"2693 DequantizeLinear_2621_1" -> "2694 Shape_2153" [label="[]", style=solid]; +"2693 DequantizeLinear_2621_1" -> "2697 Shape_2156" [label="[]", style=solid]; +"2693 DequantizeLinear_2621_1" -> "2700 Shape_2159" [label="[]", style=solid]; +"2693 DequantizeLinear_2621_1" -> "2706 Reshape_2164" [label="[]", style=solid]; +"2694 Shape_2153" -> "2696 Gather_2155" [label="[-1]", style=dashed]; +"2695 Constant_2154" -> "2696 Gather_2155" [label="[]", style=dashed]; +"2696 Gather_2155" -> "2711 Unsqueeze_2166" [label="[]", style=dashed]; +"2697 Shape_2156" -> "2699 Gather_2158" [label="[-1]", style=dashed]; +"2698 Constant_2157" -> "2699 Gather_2158" [label="[]", style=dashed]; +"2699 Gather_2158" -> "2713 Unsqueeze_2167" [label="[]", style=dashed]; +"2700 Shape_2159" -> "2702 Gather_2161" [label="[-1]", style=dashed]; +"2701 
Constant_2160" -> "2702 Gather_2161" [label="[]", style=dashed]; +"2702 Gather_2161" -> "2704 Unsqueeze_2162" [label="[]", style=dashed]; +"2703 Constant_nncf_2497" -> "2704 Unsqueeze_2162" [label="[1]", style=dashed]; +"2704 Unsqueeze_2162" -> "2705 Concat_2163" [label="[1]", style=dashed]; +"2705 Concat_2163" -> "2706 Reshape_2164" [label="[2]", style=dashed]; +"2706 Reshape_2164" -> "2709 Gemm_2165" [label="[]", style=solid]; +"2707 QuantizeLinear_h.10.attn.c_attn.weight_1" -> "2708 DequantizeLinear_h.10.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"2708 DequantizeLinear_h.10.attn.c_attn.weight_1" -> "2709 Gemm_2165" [label="[768, 2304]", style=solid]; +"2709 Gemm_2165" -> "2715 Reshape_2169" [label="[]", style=solid]; +"2710 Constant_nncf_2502" -> "2711 Unsqueeze_2166" [label="[1]", style=dashed]; +"2711 Unsqueeze_2166" -> "2714 Concat_2168" [label="[1]", style=dashed]; +"2712 Constant_nncf_2504" -> "2713 Unsqueeze_2167" [label="[1]", style=dashed]; +"2713 Unsqueeze_2167" -> "2714 Concat_2168" [label="[1]", style=dashed]; +"2714 Concat_2168" -> "2715 Reshape_2169" [label="[3]", style=dashed]; +"2715 Reshape_2169" -> "2717 Split_2170" [label="[]", style=solid]; +"2716 Constant_nncf_2508" -> "2717 Split_2170" [label="[3]", style=dashed]; +"2717 Split_2170" -> "2718 QuantizeLinear_query.21_1" [label="[]", style=solid]; +"2717 Split_2170" -> "2720 Shape_2171" [label="[]", style=solid]; +"2717 Split_2170" -> "2723 Shape_2174" [label="[]", style=solid]; +"2717 Split_2170" -> "2726 Shape_2177" [label="[]", style=solid]; +"2717 Split_2170" -> "2742 Shape_2190" [label="[]", style=solid]; +"2717 Split_2170" -> "2745 Shape_2193" [label="[]", style=solid]; +"2717 Split_2170" -> "2748 Shape_2196" [label="[]", style=solid]; +"2717 Split_2170" -> "2762 Reshape_2207" [label="[]", style=solid]; +"2717 Split_2170" -> "2766 Shape_2209" [label="[]", style=solid]; +"2717 Split_2170" -> "2769 Shape_2212" [label="[]", style=solid]; +"2717 Split_2170" -> "2772 Shape_2215" [label="[]", style=solid]; +"2717 Split_2170" -> "2786 Reshape_2226" [label="[]", style=solid]; +"2718 QuantizeLinear_query.21_1" -> "2719 DequantizeLinear_query.21_1" [label="[]", style=dashed]; +"2719 DequantizeLinear_query.21_1" -> "2740 Reshape_2188" [label="[]", style=solid]; +"2720 Shape_2171" -> "2722 Gather_2173" [label="[-1]", style=dashed]; +"2721 Constant_2172" -> "2722 Gather_2173" [label="[]", style=dashed]; +"2722 Gather_2173" -> "2734 Unsqueeze_2184" [label="[]", style=dashed]; +"2723 Shape_2174" -> "2725 Gather_2176" [label="[-1]", style=dashed]; +"2724 Constant_2175" -> "2725 Gather_2176" [label="[]", style=dashed]; +"2725 Gather_2176" -> "2736 Unsqueeze_2185" [label="[]", style=dashed]; +"2726 Shape_2177" -> "2728 Gather_2179" [label="[-1]", style=dashed]; +"2727 Constant_2178" -> "2728 Gather_2179" [label="[]", style=dashed]; +"2728 Gather_2179" -> "2730 Div_2181" [label="[]", style=dashed]; +"2729 Constant_2180" -> "2730 Div_2181" [label="[]", style=dashed]; +"2730 Div_2181" -> "2731 Cast_2182" [label="[]", style=dashed]; +"2731 Cast_2182" -> "2732 Cast_2183" [label="[]", style=dashed]; +"2732 Cast_2183" -> "2738 Unsqueeze_2186" [label="[]", style=dashed]; +"2733 Constant_nncf_2523" -> "2734 Unsqueeze_2184" [label="[1]", style=dashed]; +"2734 Unsqueeze_2184" -> "2739 Concat_2187" [label="[1]", style=dashed]; +"2735 Constant_nncf_2525" -> "2736 Unsqueeze_2185" [label="[1]", style=dashed]; +"2736 Unsqueeze_2185" -> "2739 Concat_2187" [label="[1]", style=dashed]; +"2737 Constant_nncf_2527" -> "2738 
Unsqueeze_2186" [label="[1]", style=dashed]; +"2738 Unsqueeze_2186" -> "2739 Concat_2187" [label="[1]", style=dashed]; +"2739 Concat_2187" -> "2740 Reshape_2188" [label="[4]", style=dashed]; +"2740 Reshape_2188" -> "2741 Transpose_2189" [label="[]", style=solid]; +"2741 Transpose_2189" -> "2794 MatMul_2232" [label="[]", style=solid]; +"2742 Shape_2190" -> "2744 Gather_2192" [label="[-1]", style=dashed]; +"2743 Constant_2191" -> "2744 Gather_2192" [label="[]", style=dashed]; +"2744 Gather_2192" -> "2756 Unsqueeze_2203" [label="[]", style=dashed]; +"2745 Shape_2193" -> "2747 Gather_2195" [label="[-1]", style=dashed]; +"2746 Constant_2194" -> "2747 Gather_2195" [label="[]", style=dashed]; +"2747 Gather_2195" -> "2758 Unsqueeze_2204" [label="[]", style=dashed]; +"2748 Shape_2196" -> "2750 Gather_2198" [label="[-1]", style=dashed]; +"2749 Constant_2197" -> "2750 Gather_2198" [label="[]", style=dashed]; +"2750 Gather_2198" -> "2752 Div_2200" [label="[]", style=dashed]; +"2751 Constant_2199" -> "2752 Div_2200" [label="[]", style=dashed]; +"2752 Div_2200" -> "2753 Cast_2201" [label="[]", style=dashed]; +"2753 Cast_2201" -> "2754 Cast_2202" [label="[]", style=dashed]; +"2754 Cast_2202" -> "2760 Unsqueeze_2205" [label="[]", style=dashed]; +"2755 Constant_nncf_2545" -> "2756 Unsqueeze_2203" [label="[1]", style=dashed]; +"2756 Unsqueeze_2203" -> "2761 Concat_2206" [label="[1]", style=dashed]; +"2757 Constant_nncf_2547" -> "2758 Unsqueeze_2204" [label="[1]", style=dashed]; +"2758 Unsqueeze_2204" -> "2761 Concat_2206" [label="[1]", style=dashed]; +"2759 Constant_nncf_2549" -> "2760 Unsqueeze_2205" [label="[1]", style=dashed]; +"2760 Unsqueeze_2205" -> "2761 Concat_2206" [label="[1]", style=dashed]; +"2761 Concat_2206" -> "2762 Reshape_2207" [label="[4]", style=dashed]; +"2762 Reshape_2207" -> "2763 QuantizeLinear_2686_1" [label="[]", style=solid]; +"2762 Reshape_2207" -> "2788 Transpose_2228" [label="[]", style=solid]; +"2763 QuantizeLinear_2686_1" -> "2764 DequantizeLinear_2686_1" [label="[]", style=dashed]; +"2764 DequantizeLinear_2686_1" -> "2765 Transpose_2208" [label="[]", style=solid]; +"2765 Transpose_2208" -> "2794 MatMul_2232" [label="[]", style=solid]; +"2766 Shape_2209" -> "2768 Gather_2211" [label="[-1]", style=dashed]; +"2767 Constant_2210" -> "2768 Gather_2211" [label="[]", style=dashed]; +"2768 Gather_2211" -> "2780 Unsqueeze_2222" [label="[]", style=dashed]; +"2769 Shape_2212" -> "2771 Gather_2214" [label="[-1]", style=dashed]; +"2770 Constant_2213" -> "2771 Gather_2214" [label="[]", style=dashed]; +"2771 Gather_2214" -> "2782 Unsqueeze_2223" [label="[]", style=dashed]; +"2772 Shape_2215" -> "2774 Gather_2217" [label="[-1]", style=dashed]; +"2773 Constant_2216" -> "2774 Gather_2217" [label="[]", style=dashed]; +"2774 Gather_2217" -> "2776 Div_2219" [label="[]", style=dashed]; +"2775 Constant_2218" -> "2776 Div_2219" [label="[]", style=dashed]; +"2776 Div_2219" -> "2777 Cast_2220" [label="[]", style=dashed]; +"2777 Cast_2220" -> "2778 Cast_2221" [label="[]", style=dashed]; +"2778 Cast_2221" -> "2784 Unsqueeze_2224" [label="[]", style=dashed]; +"2779 Constant_nncf_2567" -> "2780 Unsqueeze_2222" [label="[1]", style=dashed]; +"2780 Unsqueeze_2222" -> "2785 Concat_2225" [label="[1]", style=dashed]; +"2781 Constant_nncf_2569" -> "2782 Unsqueeze_2223" [label="[1]", style=dashed]; +"2782 Unsqueeze_2223" -> "2785 Concat_2225" [label="[1]", style=dashed]; +"2783 Constant_nncf_2571" -> "2784 Unsqueeze_2224" [label="[1]", style=dashed]; +"2784 Unsqueeze_2224" -> "2785 Concat_2225" [label="[1]", 
style=dashed]; +"2785 Concat_2225" -> "2786 Reshape_2226" [label="[4]", style=dashed]; +"2786 Reshape_2226" -> "2787 Transpose_2227" [label="[]", style=solid]; +"2787 Transpose_2227" -> "2792 Unsqueeze_2230" [label="[]", style=solid]; +"2787 Transpose_2227" -> "2824 MatMul_2256" [label="[]", style=solid]; +"2788 Transpose_2228" -> "2790 Unsqueeze_2229" [label="[]", style=solid]; +"2789 Constant_nncf_2577" -> "2790 Unsqueeze_2229" [label="[1]", style=dashed]; +"2790 Unsqueeze_2229" -> "2793 Concat_2231" [label="[]", style=solid]; +"2791 Constant_nncf_2579" -> "2792 Unsqueeze_2230" [label="[1]", style=dashed]; +"2792 Unsqueeze_2230" -> "2793 Concat_2231" [label="[]", style=solid]; +"2793 Concat_2231" -> "3242 nncf_model_output_11" [label="[2, 1, 12, 8, 64]", style=solid]; +"2794 MatMul_2232" -> "2796 Div_2234" [label="[]", style=solid]; +"2795 Constant_2233" -> "2796 Div_2234" [label="[]", style=solid]; +"2796 Div_2234" -> "2797 Shape_2235" [label="[]", style=solid]; +"2796 Div_2234" -> "2800 Shape_2238" [label="[]", style=solid]; +"2796 Div_2234" -> "2814 Mul_2249" [label="[]", style=solid]; +"2797 Shape_2235" -> "2799 Gather_2237" [label="[-1]", style=dashed]; +"2798 Constant_2236" -> "2799 Gather_2237" [label="[]", style=dashed]; +"2799 Gather_2237" -> "2803 Sub_2241" [label="[]", style=dashed]; +"2800 Shape_2238" -> "2802 Gather_2240" [label="[-1]", style=dashed]; +"2801 Constant_2239" -> "2802 Gather_2240" [label="[]", style=dashed]; +"2802 Gather_2240" -> "2803 Sub_2241" [label="[]", style=dashed]; +"2802 Gather_2240" -> "2807 Unsqueeze_2243" [label="[]", style=dashed]; +"2802 Gather_2240" -> "2811 Unsqueeze_2246" [label="[]", style=dashed]; +"2803 Sub_2241" -> "2805 Unsqueeze_2242" [label="[]", style=dashed]; +"2804 Constant_nncf_2592" -> "2805 Unsqueeze_2242" [label="[1]", style=dashed]; +"2805 Unsqueeze_2242" -> "2809 Slice_2245" [label="[1]", style=dashed]; +"2806 Constant_nncf_2594" -> "2807 Unsqueeze_2243" [label="[1]", style=dashed]; +"2807 Unsqueeze_2243" -> "2809 Slice_2245" [label="[1]", style=dashed]; +"2808 Constant_2244" -> "2809 Slice_2245" [label="[1]", style=dashed]; +"2809 Slice_2245" -> "2813 Slice_2248" [label="[-1, -1, -1, -1]", style=solid]; +"2810 Constant_nncf_2598" -> "2811 Unsqueeze_2246" [label="[1]", style=dashed]; +"2811 Unsqueeze_2246" -> "2813 Slice_2248" [label="[1]", style=dashed]; +"2812 Constant_2247" -> "2813 Slice_2248" [label="[1]", style=dashed]; +"2813 Slice_2248" -> "2814 Mul_2249" [label="[-1, -1, -1, -1]", style=solid]; +"2813 Slice_2248" -> "2816 Sub_2251" [label="[-1, -1, -1, -1]", style=solid]; +"2814 Mul_2249" -> "2819 Sub_2254" [label="[]", style=solid]; +"2815 Constant_2250" -> "2816 Sub_2251" [label="[]", style=solid]; +"2816 Sub_2251" -> "2818 Mul_2253" [label="[-1, -1, -1, -1]", style=solid]; +"2817 Constant_2252" -> "2818 Mul_2253" [label="[]", style=solid]; +"2818 Mul_2253" -> "2819 Sub_2254" [label="[-1, -1, -1, -1]", style=solid]; +"2819 Sub_2254" -> "2820 Shape_nncf_2608" [label="[]", style=solid]; +"2819 Sub_2254" -> "2821 Flatten_nncf_2609" [label="[]", style=solid]; +"2820 Shape_nncf_2608" -> "2823 Reshape_nncf_2611" [label="[-1]", style=dashed]; +"2821 Flatten_nncf_2609" -> "2822 Softmax_2255" [label="[]", style=solid]; +"2822 Softmax_2255" -> "2823 Reshape_nncf_2611" [label="[]", style=solid]; +"2823 Reshape_nncf_2611" -> "2824 MatMul_2256" [label="[]", style=solid]; +"2824 MatMul_2256" -> "2825 QuantizeLinear_2743_1" [label="[]", style=solid]; +"2825 QuantizeLinear_2743_1" -> "2826 DequantizeLinear_2743_1" [label="[]", 
style=dashed]; +"2826 DequantizeLinear_2743_1" -> "2827 Transpose_2257" [label="[]", style=solid]; +"2827 Transpose_2257" -> "2828 Shape_2258" [label="[]", style=solid]; +"2827 Transpose_2257" -> "2831 Shape_2261" [label="[]", style=solid]; +"2827 Transpose_2257" -> "2834 Shape_2264" [label="[]", style=solid]; +"2827 Transpose_2257" -> "2837 Shape_2267" [label="[]", style=solid]; +"2827 Transpose_2257" -> "2848 Reshape_2275" [label="[]", style=solid]; +"2828 Shape_2258" -> "2830 Gather_2260" [label="[-1]", style=dashed]; +"2829 Constant_2259" -> "2830 Gather_2260" [label="[]", style=dashed]; +"2830 Gather_2260" -> "2842 Unsqueeze_2271" [label="[]", style=dashed]; +"2831 Shape_2261" -> "2833 Gather_2263" [label="[-1]", style=dashed]; +"2832 Constant_2262" -> "2833 Gather_2263" [label="[]", style=dashed]; +"2833 Gather_2263" -> "2844 Unsqueeze_2272" [label="[]", style=dashed]; +"2834 Shape_2264" -> "2836 Gather_2266" [label="[-1]", style=dashed]; +"2835 Constant_2265" -> "2836 Gather_2266" [label="[]", style=dashed]; +"2836 Gather_2266" -> "2840 Mul_2270" [label="[]", style=dashed]; +"2837 Shape_2267" -> "2839 Gather_2269" [label="[-1]", style=dashed]; +"2838 Constant_2268" -> "2839 Gather_2269" [label="[]", style=dashed]; +"2839 Gather_2269" -> "2840 Mul_2270" [label="[]", style=dashed]; +"2840 Mul_2270" -> "2846 Unsqueeze_2273" [label="[]", style=dashed]; +"2841 Constant_nncf_2627" -> "2842 Unsqueeze_2271" [label="[1]", style=dashed]; +"2842 Unsqueeze_2271" -> "2847 Concat_2274" [label="[1]", style=dashed]; +"2843 Constant_nncf_2629" -> "2844 Unsqueeze_2272" [label="[1]", style=dashed]; +"2844 Unsqueeze_2272" -> "2847 Concat_2274" [label="[1]", style=dashed]; +"2845 Constant_nncf_2631" -> "2846 Unsqueeze_2273" [label="[1]", style=dashed]; +"2846 Unsqueeze_2273" -> "2847 Concat_2274" [label="[1]", style=dashed]; +"2847 Concat_2274" -> "2848 Reshape_2275" [label="[3]", style=dashed]; +"2848 Reshape_2275" -> "2849 Shape_2276" [label="[]", style=solid]; +"2848 Reshape_2275" -> "2852 Shape_2279" [label="[]", style=solid]; +"2848 Reshape_2275" -> "2855 Shape_2282" [label="[]", style=solid]; +"2848 Reshape_2275" -> "2861 Reshape_2287" [label="[]", style=solid]; +"2849 Shape_2276" -> "2851 Gather_2278" [label="[-1]", style=dashed]; +"2850 Constant_2277" -> "2851 Gather_2278" [label="[]", style=dashed]; +"2851 Gather_2278" -> "2866 Unsqueeze_2289" [label="[]", style=dashed]; +"2852 Shape_2279" -> "2854 Gather_2281" [label="[-1]", style=dashed]; +"2853 Constant_2280" -> "2854 Gather_2281" [label="[]", style=dashed]; +"2854 Gather_2281" -> "2868 Unsqueeze_2290" [label="[]", style=dashed]; +"2855 Shape_2282" -> "2857 Gather_2284" [label="[-1]", style=dashed]; +"2856 Constant_2283" -> "2857 Gather_2284" [label="[]", style=dashed]; +"2857 Gather_2284" -> "2859 Unsqueeze_2285" [label="[]", style=dashed]; +"2858 Constant_nncf_2644" -> "2859 Unsqueeze_2285" [label="[1]", style=dashed]; +"2859 Unsqueeze_2285" -> "2860 Concat_2286" [label="[1]", style=dashed]; +"2860 Concat_2286" -> "2861 Reshape_2287" [label="[2]", style=dashed]; +"2861 Reshape_2287" -> "2864 Gemm_2288" [label="[]", style=solid]; +"2862 QuantizeLinear_h.10.attn.c_proj.weight_1" -> "2863 DequantizeLinear_h.10.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"2863 DequantizeLinear_h.10.attn.c_proj.weight_1" -> "2864 Gemm_2288" [label="[768, 768]", style=solid]; +"2864 Gemm_2288" -> "2870 Reshape_2292" [label="[]", style=solid]; +"2865 Constant_nncf_2649" -> "2866 Unsqueeze_2289" [label="[1]", style=dashed]; +"2866 Unsqueeze_2289" -> 
"2869 Concat_2291" [label="[1]", style=dashed]; +"2867 Constant_nncf_2651" -> "2868 Unsqueeze_2290" [label="[1]", style=dashed]; +"2868 Unsqueeze_2290" -> "2869 Concat_2291" [label="[1]", style=dashed]; +"2869 Concat_2291" -> "2870 Reshape_2292" [label="[3]", style=dashed]; +"2870 Reshape_2292" -> "2871 Add_2293" [label="[]", style=solid]; +"2871 Add_2293" -> "2872 ReduceMean_2294" [label="[]", style=solid]; +"2871 Add_2293" -> "2873 Sub_2295" [label="[]", style=solid]; +"2871 Add_2293" -> "2944 Add_2352" [label="[]", style=solid]; +"2872 ReduceMean_2294" -> "2873 Sub_2295" [label="[]", style=solid]; +"2873 Sub_2295" -> "2875 Pow_2297" [label="[]", style=solid]; +"2873 Sub_2295" -> "2880 Div_2302" [label="[]", style=solid]; +"2874 Constant_2296" -> "2875 Pow_2297" [label="[]", style=solid]; +"2875 Pow_2297" -> "2876 ReduceMean_2298" [label="[]", style=solid]; +"2876 ReduceMean_2298" -> "2878 Add_2300" [label="[]", style=solid]; +"2877 Constant_2299" -> "2878 Add_2300" [label="[]", style=solid]; +"2878 Add_2300" -> "2879 Sqrt_2301" [label="[]", style=solid]; +"2879 Sqrt_2301" -> "2880 Div_2302" [label="[]", style=solid]; +"2880 Div_2302" -> "2881 Mul_2303" [label="[]", style=solid]; +"2881 Mul_2303" -> "2882 Add_2304" [label="[]", style=solid]; +"2882 Add_2304" -> "2883 QuantizeLinear_2795_1" [label="[]", style=solid]; +"2883 QuantizeLinear_2795_1" -> "2884 DequantizeLinear_2795_1" [label="[]", style=dashed]; +"2884 DequantizeLinear_2795_1" -> "2885 Shape_2305" [label="[]", style=solid]; +"2884 DequantizeLinear_2795_1" -> "2888 Shape_2308" [label="[]", style=solid]; +"2884 DequantizeLinear_2795_1" -> "2891 Shape_2311" [label="[]", style=solid]; +"2884 DequantizeLinear_2795_1" -> "2897 Reshape_2316" [label="[]", style=solid]; +"2885 Shape_2305" -> "2887 Gather_2307" [label="[-1]", style=dashed]; +"2886 Constant_2306" -> "2887 Gather_2307" [label="[]", style=dashed]; +"2887 Gather_2307" -> "2902 Unsqueeze_2318" [label="[]", style=dashed]; +"2888 Shape_2308" -> "2890 Gather_2310" [label="[-1]", style=dashed]; +"2889 Constant_2309" -> "2890 Gather_2310" [label="[]", style=dashed]; +"2890 Gather_2310" -> "2904 Unsqueeze_2319" [label="[]", style=dashed]; +"2891 Shape_2311" -> "2893 Gather_2313" [label="[-1]", style=dashed]; +"2892 Constant_2312" -> "2893 Gather_2313" [label="[]", style=dashed]; +"2893 Gather_2313" -> "2895 Unsqueeze_2314" [label="[]", style=dashed]; +"2894 Constant_nncf_2676" -> "2895 Unsqueeze_2314" [label="[1]", style=dashed]; +"2895 Unsqueeze_2314" -> "2896 Concat_2315" [label="[1]", style=dashed]; +"2896 Concat_2315" -> "2897 Reshape_2316" [label="[2]", style=dashed]; +"2897 Reshape_2316" -> "2900 Gemm_2317" [label="[]", style=solid]; +"2898 QuantizeLinear_h.10.mlp.c_fc.weight_1" -> "2899 DequantizeLinear_h.10.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"2899 DequantizeLinear_h.10.mlp.c_fc.weight_1" -> "2900 Gemm_2317" [label="[768, 3072]", style=solid]; +"2900 Gemm_2317" -> "2906 Reshape_2321" [label="[]", style=solid]; +"2901 Constant_nncf_2681" -> "2902 Unsqueeze_2318" [label="[1]", style=dashed]; +"2902 Unsqueeze_2318" -> "2905 Concat_2320" [label="[1]", style=dashed]; +"2903 Constant_nncf_2683" -> "2904 Unsqueeze_2319" [label="[1]", style=dashed]; +"2904 Unsqueeze_2319" -> "2905 Concat_2320" [label="[1]", style=dashed]; +"2905 Concat_2320" -> "2906 Reshape_2321" [label="[3]", style=dashed]; +"2906 Reshape_2321" -> "2908 Mul_2323" [label="[]", style=solid]; +"2906 Reshape_2321" -> "2910 Pow_2325" [label="[]", style=solid]; +"2906 Reshape_2321" -> "2913 
Add_2328" [label="[]", style=solid]; +"2907 Constant_2322" -> "2908 Mul_2323" [label="[]", style=solid]; +"2908 Mul_2323" -> "2919 Mul_2334" [label="[]", style=solid]; +"2909 Constant_2324" -> "2910 Pow_2325" [label="[]", style=solid]; +"2910 Pow_2325" -> "2912 Mul_2327" [label="[]", style=solid]; +"2911 Constant_2326" -> "2912 Mul_2327" [label="[]", style=solid]; +"2912 Mul_2327" -> "2913 Add_2328" [label="[]", style=solid]; +"2913 Add_2328" -> "2915 Mul_2330" [label="[]", style=solid]; +"2914 Constant_2329" -> "2915 Mul_2330" [label="[]", style=solid]; +"2915 Mul_2330" -> "2916 Tanh_2331" [label="[]", style=solid]; +"2916 Tanh_2331" -> "2918 Add_2333" [label="[]", style=solid]; +"2917 Constant_2332" -> "2918 Add_2333" [label="[]", style=solid]; +"2918 Add_2333" -> "2919 Mul_2334" [label="[]", style=solid]; +"2919 Mul_2334" -> "2920 QuantizeLinear_2829_1" [label="[]", style=solid]; +"2920 QuantizeLinear_2829_1" -> "2921 DequantizeLinear_2829_1" [label="[]", style=dashed]; +"2921 DequantizeLinear_2829_1" -> "2922 Shape_2335" [label="[]", style=solid]; +"2921 DequantizeLinear_2829_1" -> "2925 Shape_2338" [label="[]", style=solid]; +"2921 DequantizeLinear_2829_1" -> "2928 Shape_2341" [label="[]", style=solid]; +"2921 DequantizeLinear_2829_1" -> "2934 Reshape_2346" [label="[]", style=solid]; +"2922 Shape_2335" -> "2924 Gather_2337" [label="[-1]", style=dashed]; +"2923 Constant_2336" -> "2924 Gather_2337" [label="[]", style=dashed]; +"2924 Gather_2337" -> "2939 Unsqueeze_2348" [label="[]", style=dashed]; +"2925 Shape_2338" -> "2927 Gather_2340" [label="[-1]", style=dashed]; +"2926 Constant_2339" -> "2927 Gather_2340" [label="[]", style=dashed]; +"2927 Gather_2340" -> "2941 Unsqueeze_2349" [label="[]", style=dashed]; +"2928 Shape_2341" -> "2930 Gather_2343" [label="[-1]", style=dashed]; +"2929 Constant_2342" -> "2930 Gather_2343" [label="[]", style=dashed]; +"2930 Gather_2343" -> "2932 Unsqueeze_2344" [label="[]", style=dashed]; +"2931 Constant_nncf_2709" -> "2932 Unsqueeze_2344" [label="[1]", style=dashed]; +"2932 Unsqueeze_2344" -> "2933 Concat_2345" [label="[1]", style=dashed]; +"2933 Concat_2345" -> "2934 Reshape_2346" [label="[2]", style=dashed]; +"2934 Reshape_2346" -> "2937 Gemm_2347" [label="[]", style=solid]; +"2935 QuantizeLinear_h.10.mlp.c_proj.weight_1" -> "2936 DequantizeLinear_h.10.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"2936 DequantizeLinear_h.10.mlp.c_proj.weight_1" -> "2937 Gemm_2347" [label="[3072, 768]", style=solid]; +"2937 Gemm_2347" -> "2943 Reshape_2351" [label="[]", style=solid]; +"2938 Constant_nncf_2714" -> "2939 Unsqueeze_2348" [label="[1]", style=dashed]; +"2939 Unsqueeze_2348" -> "2942 Concat_2350" [label="[1]", style=dashed]; +"2940 Constant_nncf_2716" -> "2941 Unsqueeze_2349" [label="[1]", style=dashed]; +"2941 Unsqueeze_2349" -> "2942 Concat_2350" [label="[1]", style=dashed]; +"2942 Concat_2350" -> "2943 Reshape_2351" [label="[3]", style=dashed]; +"2943 Reshape_2351" -> "2944 Add_2352" [label="[]", style=solid]; +"2944 Add_2352" -> "2945 ReduceMean_2353" [label="[]", style=solid]; +"2944 Add_2352" -> "2946 Sub_2354" [label="[]", style=solid]; +"2944 Add_2352" -> "3135 Add_2504" [label="[]", style=solid]; +"2945 ReduceMean_2353" -> "2946 Sub_2354" [label="[]", style=solid]; +"2946 Sub_2354" -> "2948 Pow_2356" [label="[]", style=solid]; +"2946 Sub_2354" -> "2953 Div_2361" [label="[]", style=solid]; +"2947 Constant_2355" -> "2948 Pow_2356" [label="[]", style=solid]; +"2948 Pow_2356" -> "2949 ReduceMean_2357" [label="[]", style=solid]; +"2949 
ReduceMean_2357" -> "2951 Add_2359" [label="[]", style=solid]; +"2950 Constant_2358" -> "2951 Add_2359" [label="[]", style=solid]; +"2951 Add_2359" -> "2952 Sqrt_2360" [label="[]", style=solid]; +"2952 Sqrt_2360" -> "2953 Div_2361" [label="[]", style=solid]; +"2953 Div_2361" -> "2954 Mul_2362" [label="[]", style=solid]; +"2954 Mul_2362" -> "2955 Add_2363" [label="[]", style=solid]; +"2955 Add_2363" -> "2956 QuantizeLinear_2862_1" [label="[]", style=solid]; +"2956 QuantizeLinear_2862_1" -> "2957 DequantizeLinear_2862_1" [label="[]", style=dashed]; +"2957 DequantizeLinear_2862_1" -> "2958 Shape_2364" [label="[]", style=solid]; +"2957 DequantizeLinear_2862_1" -> "2961 Shape_2367" [label="[]", style=solid]; +"2957 DequantizeLinear_2862_1" -> "2964 Shape_2370" [label="[]", style=solid]; +"2957 DequantizeLinear_2862_1" -> "2970 Reshape_2375" [label="[]", style=solid]; +"2958 Shape_2364" -> "2960 Gather_2366" [label="[-1]", style=dashed]; +"2959 Constant_2365" -> "2960 Gather_2366" [label="[]", style=dashed]; +"2960 Gather_2366" -> "2975 Unsqueeze_2377" [label="[]", style=dashed]; +"2961 Shape_2367" -> "2963 Gather_2369" [label="[-1]", style=dashed]; +"2962 Constant_2368" -> "2963 Gather_2369" [label="[]", style=dashed]; +"2963 Gather_2369" -> "2977 Unsqueeze_2378" [label="[]", style=dashed]; +"2964 Shape_2370" -> "2966 Gather_2372" [label="[-1]", style=dashed]; +"2965 Constant_2371" -> "2966 Gather_2372" [label="[]", style=dashed]; +"2966 Gather_2372" -> "2968 Unsqueeze_2373" [label="[]", style=dashed]; +"2967 Constant_nncf_2741" -> "2968 Unsqueeze_2373" [label="[1]", style=dashed]; +"2968 Unsqueeze_2373" -> "2969 Concat_2374" [label="[1]", style=dashed]; +"2969 Concat_2374" -> "2970 Reshape_2375" [label="[2]", style=dashed]; +"2970 Reshape_2375" -> "2973 Gemm_2376" [label="[]", style=solid]; +"2971 QuantizeLinear_h.11.attn.c_attn.weight_1" -> "2972 DequantizeLinear_h.11.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"2972 DequantizeLinear_h.11.attn.c_attn.weight_1" -> "2973 Gemm_2376" [label="[768, 2304]", style=solid]; +"2973 Gemm_2376" -> "2979 Reshape_2380" [label="[]", style=solid]; +"2974 Constant_nncf_2746" -> "2975 Unsqueeze_2377" [label="[1]", style=dashed]; +"2975 Unsqueeze_2377" -> "2978 Concat_2379" [label="[1]", style=dashed]; +"2976 Constant_nncf_2748" -> "2977 Unsqueeze_2378" [label="[1]", style=dashed]; +"2977 Unsqueeze_2378" -> "2978 Concat_2379" [label="[1]", style=dashed]; +"2978 Concat_2379" -> "2979 Reshape_2380" [label="[3]", style=dashed]; +"2979 Reshape_2380" -> "2981 Split_2381" [label="[]", style=solid]; +"2980 Constant_nncf_2752" -> "2981 Split_2381" [label="[3]", style=dashed]; +"2981 Split_2381" -> "2982 QuantizeLinear_query.23_1" [label="[]", style=solid]; +"2981 Split_2381" -> "2984 Shape_2382" [label="[]", style=solid]; +"2981 Split_2381" -> "2987 Shape_2385" [label="[]", style=solid]; +"2981 Split_2381" -> "2990 Shape_2388" [label="[]", style=solid]; +"2981 Split_2381" -> "3006 Shape_2401" [label="[]", style=solid]; +"2981 Split_2381" -> "3009 Shape_2404" [label="[]", style=solid]; +"2981 Split_2381" -> "3012 Shape_2407" [label="[]", style=solid]; +"2981 Split_2381" -> "3026 Reshape_2418" [label="[]", style=solid]; +"2981 Split_2381" -> "3030 Shape_2420" [label="[]", style=solid]; +"2981 Split_2381" -> "3033 Shape_2423" [label="[]", style=solid]; +"2981 Split_2381" -> "3036 Shape_2426" [label="[]", style=solid]; +"2981 Split_2381" -> "3050 Reshape_2437" [label="[]", style=solid]; +"2982 QuantizeLinear_query.23_1" -> "2983 DequantizeLinear_query.23_1" 
[label="[]", style=dashed]; +"2983 DequantizeLinear_query.23_1" -> "3004 Reshape_2399" [label="[]", style=solid]; +"2984 Shape_2382" -> "2986 Gather_2384" [label="[-1]", style=dashed]; +"2985 Constant_2383" -> "2986 Gather_2384" [label="[]", style=dashed]; +"2986 Gather_2384" -> "2998 Unsqueeze_2395" [label="[]", style=dashed]; +"2987 Shape_2385" -> "2989 Gather_2387" [label="[-1]", style=dashed]; +"2988 Constant_2386" -> "2989 Gather_2387" [label="[]", style=dashed]; +"2989 Gather_2387" -> "3000 Unsqueeze_2396" [label="[]", style=dashed]; +"2990 Shape_2388" -> "2992 Gather_2390" [label="[-1]", style=dashed]; +"2991 Constant_2389" -> "2992 Gather_2390" [label="[]", style=dashed]; +"2992 Gather_2390" -> "2994 Div_2392" [label="[]", style=dashed]; +"2993 Constant_2391" -> "2994 Div_2392" [label="[]", style=dashed]; +"2994 Div_2392" -> "2995 Cast_2393" [label="[]", style=dashed]; +"2995 Cast_2393" -> "2996 Cast_2394" [label="[]", style=dashed]; +"2996 Cast_2394" -> "3002 Unsqueeze_2397" [label="[]", style=dashed]; +"2997 Constant_nncf_2767" -> "2998 Unsqueeze_2395" [label="[1]", style=dashed]; +"2998 Unsqueeze_2395" -> "3003 Concat_2398" [label="[1]", style=dashed]; +"2999 Constant_nncf_2769" -> "3000 Unsqueeze_2396" [label="[1]", style=dashed]; +"3000 Unsqueeze_2396" -> "3003 Concat_2398" [label="[1]", style=dashed]; +"3001 Constant_nncf_2771" -> "3002 Unsqueeze_2397" [label="[1]", style=dashed]; +"3002 Unsqueeze_2397" -> "3003 Concat_2398" [label="[1]", style=dashed]; +"3003 Concat_2398" -> "3004 Reshape_2399" [label="[4]", style=dashed]; +"3004 Reshape_2399" -> "3005 Transpose_2400" [label="[]", style=solid]; +"3005 Transpose_2400" -> "3058 MatMul_2443" [label="[]", style=solid]; +"3006 Shape_2401" -> "3008 Gather_2403" [label="[-1]", style=dashed]; +"3007 Constant_2402" -> "3008 Gather_2403" [label="[]", style=dashed]; +"3008 Gather_2403" -> "3020 Unsqueeze_2414" [label="[]", style=dashed]; +"3009 Shape_2404" -> "3011 Gather_2406" [label="[-1]", style=dashed]; +"3010 Constant_2405" -> "3011 Gather_2406" [label="[]", style=dashed]; +"3011 Gather_2406" -> "3022 Unsqueeze_2415" [label="[]", style=dashed]; +"3012 Shape_2407" -> "3014 Gather_2409" [label="[-1]", style=dashed]; +"3013 Constant_2408" -> "3014 Gather_2409" [label="[]", style=dashed]; +"3014 Gather_2409" -> "3016 Div_2411" [label="[]", style=dashed]; +"3015 Constant_2410" -> "3016 Div_2411" [label="[]", style=dashed]; +"3016 Div_2411" -> "3017 Cast_2412" [label="[]", style=dashed]; +"3017 Cast_2412" -> "3018 Cast_2413" [label="[]", style=dashed]; +"3018 Cast_2413" -> "3024 Unsqueeze_2416" [label="[]", style=dashed]; +"3019 Constant_nncf_2789" -> "3020 Unsqueeze_2414" [label="[1]", style=dashed]; +"3020 Unsqueeze_2414" -> "3025 Concat_2417" [label="[1]", style=dashed]; +"3021 Constant_nncf_2791" -> "3022 Unsqueeze_2415" [label="[1]", style=dashed]; +"3022 Unsqueeze_2415" -> "3025 Concat_2417" [label="[1]", style=dashed]; +"3023 Constant_nncf_2793" -> "3024 Unsqueeze_2416" [label="[1]", style=dashed]; +"3024 Unsqueeze_2416" -> "3025 Concat_2417" [label="[1]", style=dashed]; +"3025 Concat_2417" -> "3026 Reshape_2418" [label="[4]", style=dashed]; +"3026 Reshape_2418" -> "3027 QuantizeLinear_2927_1" [label="[]", style=solid]; +"3026 Reshape_2418" -> "3052 Transpose_2439" [label="[]", style=solid]; +"3027 QuantizeLinear_2927_1" -> "3028 DequantizeLinear_2927_1" [label="[]", style=dashed]; +"3028 DequantizeLinear_2927_1" -> "3029 Transpose_2419" [label="[]", style=solid]; +"3029 Transpose_2419" -> "3058 MatMul_2443" [label="[]", 
style=solid]; +"3030 Shape_2420" -> "3032 Gather_2422" [label="[-1]", style=dashed]; +"3031 Constant_2421" -> "3032 Gather_2422" [label="[]", style=dashed]; +"3032 Gather_2422" -> "3044 Unsqueeze_2433" [label="[]", style=dashed]; +"3033 Shape_2423" -> "3035 Gather_2425" [label="[-1]", style=dashed]; +"3034 Constant_2424" -> "3035 Gather_2425" [label="[]", style=dashed]; +"3035 Gather_2425" -> "3046 Unsqueeze_2434" [label="[]", style=dashed]; +"3036 Shape_2426" -> "3038 Gather_2428" [label="[-1]", style=dashed]; +"3037 Constant_2427" -> "3038 Gather_2428" [label="[]", style=dashed]; +"3038 Gather_2428" -> "3040 Div_2430" [label="[]", style=dashed]; +"3039 Constant_2429" -> "3040 Div_2430" [label="[]", style=dashed]; +"3040 Div_2430" -> "3041 Cast_2431" [label="[]", style=dashed]; +"3041 Cast_2431" -> "3042 Cast_2432" [label="[]", style=dashed]; +"3042 Cast_2432" -> "3048 Unsqueeze_2435" [label="[]", style=dashed]; +"3043 Constant_nncf_2811" -> "3044 Unsqueeze_2433" [label="[1]", style=dashed]; +"3044 Unsqueeze_2433" -> "3049 Concat_2436" [label="[1]", style=dashed]; +"3045 Constant_nncf_2813" -> "3046 Unsqueeze_2434" [label="[1]", style=dashed]; +"3046 Unsqueeze_2434" -> "3049 Concat_2436" [label="[1]", style=dashed]; +"3047 Constant_nncf_2815" -> "3048 Unsqueeze_2435" [label="[1]", style=dashed]; +"3048 Unsqueeze_2435" -> "3049 Concat_2436" [label="[1]", style=dashed]; +"3049 Concat_2436" -> "3050 Reshape_2437" [label="[4]", style=dashed]; +"3050 Reshape_2437" -> "3051 Transpose_2438" [label="[]", style=solid]; +"3051 Transpose_2438" -> "3056 Unsqueeze_2441" [label="[]", style=solid]; +"3051 Transpose_2438" -> "3088 MatMul_2467" [label="[]", style=solid]; +"3052 Transpose_2439" -> "3054 Unsqueeze_2440" [label="[]", style=solid]; +"3053 Constant_nncf_2821" -> "3054 Unsqueeze_2440" [label="[1]", style=dashed]; +"3054 Unsqueeze_2440" -> "3057 Concat_2442" [label="[]", style=solid]; +"3055 Constant_nncf_2823" -> "3056 Unsqueeze_2441" [label="[1]", style=dashed]; +"3056 Unsqueeze_2441" -> "3057 Concat_2442" [label="[]", style=solid]; +"3057 Concat_2442" -> "3243 nncf_model_output_12" [label="[2, 1, 12, 8, 64]", style=solid]; +"3058 MatMul_2443" -> "3060 Div_2445" [label="[]", style=solid]; +"3059 Constant_2444" -> "3060 Div_2445" [label="[]", style=solid]; +"3060 Div_2445" -> "3061 Shape_2446" [label="[]", style=solid]; +"3060 Div_2445" -> "3064 Shape_2449" [label="[]", style=solid]; +"3060 Div_2445" -> "3078 Mul_2460" [label="[]", style=solid]; +"3061 Shape_2446" -> "3063 Gather_2448" [label="[-1]", style=dashed]; +"3062 Constant_2447" -> "3063 Gather_2448" [label="[]", style=dashed]; +"3063 Gather_2448" -> "3067 Sub_2452" [label="[]", style=dashed]; +"3064 Shape_2449" -> "3066 Gather_2451" [label="[-1]", style=dashed]; +"3065 Constant_2450" -> "3066 Gather_2451" [label="[]", style=dashed]; +"3066 Gather_2451" -> "3067 Sub_2452" [label="[]", style=dashed]; +"3066 Gather_2451" -> "3071 Unsqueeze_2454" [label="[]", style=dashed]; +"3066 Gather_2451" -> "3075 Unsqueeze_2457" [label="[]", style=dashed]; +"3067 Sub_2452" -> "3069 Unsqueeze_2453" [label="[]", style=dashed]; +"3068 Constant_nncf_2836" -> "3069 Unsqueeze_2453" [label="[1]", style=dashed]; +"3069 Unsqueeze_2453" -> "3073 Slice_2456" [label="[1]", style=dashed]; +"3070 Constant_nncf_2838" -> "3071 Unsqueeze_2454" [label="[1]", style=dashed]; +"3071 Unsqueeze_2454" -> "3073 Slice_2456" [label="[1]", style=dashed]; +"3072 Constant_2455" -> "3073 Slice_2456" [label="[1]", style=dashed]; +"3073 Slice_2456" -> "3077 Slice_2459" [label="[-1, 
-1, -1, -1]", style=solid]; +"3074 Constant_nncf_2842" -> "3075 Unsqueeze_2457" [label="[1]", style=dashed]; +"3075 Unsqueeze_2457" -> "3077 Slice_2459" [label="[1]", style=dashed]; +"3076 Constant_2458" -> "3077 Slice_2459" [label="[1]", style=dashed]; +"3077 Slice_2459" -> "3078 Mul_2460" [label="[-1, -1, -1, -1]", style=solid]; +"3077 Slice_2459" -> "3080 Sub_2462" [label="[-1, -1, -1, -1]", style=solid]; +"3078 Mul_2460" -> "3083 Sub_2465" [label="[]", style=solid]; +"3079 Constant_2461" -> "3080 Sub_2462" [label="[]", style=solid]; +"3080 Sub_2462" -> "3082 Mul_2464" [label="[-1, -1, -1, -1]", style=solid]; +"3081 Constant_2463" -> "3082 Mul_2464" [label="[]", style=solid]; +"3082 Mul_2464" -> "3083 Sub_2465" [label="[-1, -1, -1, -1]", style=solid]; +"3083 Sub_2465" -> "3084 Shape_nncf_2852" [label="[]", style=solid]; +"3083 Sub_2465" -> "3085 Flatten_nncf_2853" [label="[]", style=solid]; +"3084 Shape_nncf_2852" -> "3087 Reshape_nncf_2855" [label="[-1]", style=dashed]; +"3085 Flatten_nncf_2853" -> "3086 Softmax_2466" [label="[]", style=solid]; +"3086 Softmax_2466" -> "3087 Reshape_nncf_2855" [label="[]", style=solid]; +"3087 Reshape_nncf_2855" -> "3088 MatMul_2467" [label="[]", style=solid]; +"3088 MatMul_2467" -> "3089 QuantizeLinear_2984_1" [label="[]", style=solid]; +"3089 QuantizeLinear_2984_1" -> "3090 DequantizeLinear_2984_1" [label="[]", style=dashed]; +"3090 DequantizeLinear_2984_1" -> "3091 Transpose_2468" [label="[]", style=solid]; +"3091 Transpose_2468" -> "3092 Shape_2469" [label="[]", style=solid]; +"3091 Transpose_2468" -> "3095 Shape_2472" [label="[]", style=solid]; +"3091 Transpose_2468" -> "3098 Shape_2475" [label="[]", style=solid]; +"3091 Transpose_2468" -> "3101 Shape_2478" [label="[]", style=solid]; +"3091 Transpose_2468" -> "3112 Reshape_2486" [label="[]", style=solid]; +"3092 Shape_2469" -> "3094 Gather_2471" [label="[-1]", style=dashed]; +"3093 Constant_2470" -> "3094 Gather_2471" [label="[]", style=dashed]; +"3094 Gather_2471" -> "3106 Unsqueeze_2482" [label="[]", style=dashed]; +"3095 Shape_2472" -> "3097 Gather_2474" [label="[-1]", style=dashed]; +"3096 Constant_2473" -> "3097 Gather_2474" [label="[]", style=dashed]; +"3097 Gather_2474" -> "3108 Unsqueeze_2483" [label="[]", style=dashed]; +"3098 Shape_2475" -> "3100 Gather_2477" [label="[-1]", style=dashed]; +"3099 Constant_2476" -> "3100 Gather_2477" [label="[]", style=dashed]; +"3100 Gather_2477" -> "3104 Mul_2481" [label="[]", style=dashed]; +"3101 Shape_2478" -> "3103 Gather_2480" [label="[-1]", style=dashed]; +"3102 Constant_2479" -> "3103 Gather_2480" [label="[]", style=dashed]; +"3103 Gather_2480" -> "3104 Mul_2481" [label="[]", style=dashed]; +"3104 Mul_2481" -> "3110 Unsqueeze_2484" [label="[]", style=dashed]; +"3105 Constant_nncf_2871" -> "3106 Unsqueeze_2482" [label="[1]", style=dashed]; +"3106 Unsqueeze_2482" -> "3111 Concat_2485" [label="[1]", style=dashed]; +"3107 Constant_nncf_2873" -> "3108 Unsqueeze_2483" [label="[1]", style=dashed]; +"3108 Unsqueeze_2483" -> "3111 Concat_2485" [label="[1]", style=dashed]; +"3109 Constant_nncf_2875" -> "3110 Unsqueeze_2484" [label="[1]", style=dashed]; +"3110 Unsqueeze_2484" -> "3111 Concat_2485" [label="[1]", style=dashed]; +"3111 Concat_2485" -> "3112 Reshape_2486" [label="[3]", style=dashed]; +"3112 Reshape_2486" -> "3113 Shape_2487" [label="[]", style=solid]; +"3112 Reshape_2486" -> "3116 Shape_2490" [label="[]", style=solid]; +"3112 Reshape_2486" -> "3119 Shape_2493" [label="[]", style=solid]; +"3112 Reshape_2486" -> "3125 Reshape_2498" [label="[]", 
style=solid]; +"3113 Shape_2487" -> "3115 Gather_2489" [label="[-1]", style=dashed]; +"3114 Constant_2488" -> "3115 Gather_2489" [label="[]", style=dashed]; +"3115 Gather_2489" -> "3130 Unsqueeze_2500" [label="[]", style=dashed]; +"3116 Shape_2490" -> "3118 Gather_2492" [label="[-1]", style=dashed]; +"3117 Constant_2491" -> "3118 Gather_2492" [label="[]", style=dashed]; +"3118 Gather_2492" -> "3132 Unsqueeze_2501" [label="[]", style=dashed]; +"3119 Shape_2493" -> "3121 Gather_2495" [label="[-1]", style=dashed]; +"3120 Constant_2494" -> "3121 Gather_2495" [label="[]", style=dashed]; +"3121 Gather_2495" -> "3123 Unsqueeze_2496" [label="[]", style=dashed]; +"3122 Constant_nncf_2888" -> "3123 Unsqueeze_2496" [label="[1]", style=dashed]; +"3123 Unsqueeze_2496" -> "3124 Concat_2497" [label="[1]", style=dashed]; +"3124 Concat_2497" -> "3125 Reshape_2498" [label="[2]", style=dashed]; +"3125 Reshape_2498" -> "3128 Gemm_2499" [label="[]", style=solid]; +"3126 QuantizeLinear_h.11.attn.c_proj.weight_1" -> "3127 DequantizeLinear_h.11.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"3127 DequantizeLinear_h.11.attn.c_proj.weight_1" -> "3128 Gemm_2499" [label="[768, 768]", style=solid]; +"3128 Gemm_2499" -> "3134 Reshape_2503" [label="[]", style=solid]; +"3129 Constant_nncf_2893" -> "3130 Unsqueeze_2500" [label="[1]", style=dashed]; +"3130 Unsqueeze_2500" -> "3133 Concat_2502" [label="[1]", style=dashed]; +"3131 Constant_nncf_2895" -> "3132 Unsqueeze_2501" [label="[1]", style=dashed]; +"3132 Unsqueeze_2501" -> "3133 Concat_2502" [label="[1]", style=dashed]; +"3133 Concat_2502" -> "3134 Reshape_2503" [label="[3]", style=dashed]; +"3134 Reshape_2503" -> "3135 Add_2504" [label="[]", style=solid]; +"3135 Add_2504" -> "3136 ReduceMean_2505" [label="[]", style=solid]; +"3135 Add_2504" -> "3137 Sub_2506" [label="[]", style=solid]; +"3135 Add_2504" -> "3208 Add_2563" [label="[]", style=solid]; +"3136 ReduceMean_2505" -> "3137 Sub_2506" [label="[]", style=solid]; +"3137 Sub_2506" -> "3139 Pow_2508" [label="[]", style=solid]; +"3137 Sub_2506" -> "3144 Div_2513" [label="[]", style=solid]; +"3138 Constant_2507" -> "3139 Pow_2508" [label="[]", style=solid]; +"3139 Pow_2508" -> "3140 ReduceMean_2509" [label="[]", style=solid]; +"3140 ReduceMean_2509" -> "3142 Add_2511" [label="[]", style=solid]; +"3141 Constant_2510" -> "3142 Add_2511" [label="[]", style=solid]; +"3142 Add_2511" -> "3143 Sqrt_2512" [label="[]", style=solid]; +"3143 Sqrt_2512" -> "3144 Div_2513" [label="[]", style=solid]; +"3144 Div_2513" -> "3145 Mul_2514" [label="[]", style=solid]; +"3145 Mul_2514" -> "3146 Add_2515" [label="[]", style=solid]; +"3146 Add_2515" -> "3147 QuantizeLinear_3036_1" [label="[]", style=solid]; +"3147 QuantizeLinear_3036_1" -> "3148 DequantizeLinear_3036_1" [label="[]", style=dashed]; +"3148 DequantizeLinear_3036_1" -> "3149 Shape_2516" [label="[]", style=solid]; +"3148 DequantizeLinear_3036_1" -> "3152 Shape_2519" [label="[]", style=solid]; +"3148 DequantizeLinear_3036_1" -> "3155 Shape_2522" [label="[]", style=solid]; +"3148 DequantizeLinear_3036_1" -> "3161 Reshape_2527" [label="[]", style=solid]; +"3149 Shape_2516" -> "3151 Gather_2518" [label="[-1]", style=dashed]; +"3150 Constant_2517" -> "3151 Gather_2518" [label="[]", style=dashed]; +"3151 Gather_2518" -> "3166 Unsqueeze_2529" [label="[]", style=dashed]; +"3152 Shape_2519" -> "3154 Gather_2521" [label="[-1]", style=dashed]; +"3153 Constant_2520" -> "3154 Gather_2521" [label="[]", style=dashed]; +"3154 Gather_2521" -> "3168 Unsqueeze_2530" [label="[]", 
style=dashed]; +"3155 Shape_2522" -> "3157 Gather_2524" [label="[-1]", style=dashed]; +"3156 Constant_2523" -> "3157 Gather_2524" [label="[]", style=dashed]; +"3157 Gather_2524" -> "3159 Unsqueeze_2525" [label="[]", style=dashed]; +"3158 Constant_nncf_2920" -> "3159 Unsqueeze_2525" [label="[1]", style=dashed]; +"3159 Unsqueeze_2525" -> "3160 Concat_2526" [label="[1]", style=dashed]; +"3160 Concat_2526" -> "3161 Reshape_2527" [label="[2]", style=dashed]; +"3161 Reshape_2527" -> "3164 Gemm_2528" [label="[]", style=solid]; +"3162 QuantizeLinear_h.11.mlp.c_fc.weight_1" -> "3163 DequantizeLinear_h.11.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"3163 DequantizeLinear_h.11.mlp.c_fc.weight_1" -> "3164 Gemm_2528" [label="[768, 3072]", style=solid]; +"3164 Gemm_2528" -> "3170 Reshape_2532" [label="[]", style=solid]; +"3165 Constant_nncf_2925" -> "3166 Unsqueeze_2529" [label="[1]", style=dashed]; +"3166 Unsqueeze_2529" -> "3169 Concat_2531" [label="[1]", style=dashed]; +"3167 Constant_nncf_2927" -> "3168 Unsqueeze_2530" [label="[1]", style=dashed]; +"3168 Unsqueeze_2530" -> "3169 Concat_2531" [label="[1]", style=dashed]; +"3169 Concat_2531" -> "3170 Reshape_2532" [label="[3]", style=dashed]; +"3170 Reshape_2532" -> "3172 Mul_2534" [label="[]", style=solid]; +"3170 Reshape_2532" -> "3174 Pow_2536" [label="[]", style=solid]; +"3170 Reshape_2532" -> "3177 Add_2539" [label="[]", style=solid]; +"3171 Constant_2533" -> "3172 Mul_2534" [label="[]", style=solid]; +"3172 Mul_2534" -> "3183 Mul_2545" [label="[]", style=solid]; +"3173 Constant_2535" -> "3174 Pow_2536" [label="[]", style=solid]; +"3174 Pow_2536" -> "3176 Mul_2538" [label="[]", style=solid]; +"3175 Constant_2537" -> "3176 Mul_2538" [label="[]", style=solid]; +"3176 Mul_2538" -> "3177 Add_2539" [label="[]", style=solid]; +"3177 Add_2539" -> "3179 Mul_2541" [label="[]", style=solid]; +"3178 Constant_2540" -> "3179 Mul_2541" [label="[]", style=solid]; +"3179 Mul_2541" -> "3180 Tanh_2542" [label="[]", style=solid]; +"3180 Tanh_2542" -> "3182 Add_2544" [label="[]", style=solid]; +"3181 Constant_2543" -> "3182 Add_2544" [label="[]", style=solid]; +"3182 Add_2544" -> "3183 Mul_2545" [label="[]", style=solid]; +"3183 Mul_2545" -> "3184 QuantizeLinear_3070_1" [label="[]", style=solid]; +"3184 QuantizeLinear_3070_1" -> "3185 DequantizeLinear_3070_1" [label="[]", style=dashed]; +"3185 DequantizeLinear_3070_1" -> "3186 Shape_2546" [label="[]", style=solid]; +"3185 DequantizeLinear_3070_1" -> "3189 Shape_2549" [label="[]", style=solid]; +"3185 DequantizeLinear_3070_1" -> "3192 Shape_2552" [label="[]", style=solid]; +"3185 DequantizeLinear_3070_1" -> "3198 Reshape_2557" [label="[]", style=solid]; +"3186 Shape_2546" -> "3188 Gather_2548" [label="[-1]", style=dashed]; +"3187 Constant_2547" -> "3188 Gather_2548" [label="[]", style=dashed]; +"3188 Gather_2548" -> "3203 Unsqueeze_2559" [label="[]", style=dashed]; +"3189 Shape_2549" -> "3191 Gather_2551" [label="[-1]", style=dashed]; +"3190 Constant_2550" -> "3191 Gather_2551" [label="[]", style=dashed]; +"3191 Gather_2551" -> "3205 Unsqueeze_2560" [label="[]", style=dashed]; +"3192 Shape_2552" -> "3194 Gather_2554" [label="[-1]", style=dashed]; +"3193 Constant_2553" -> "3194 Gather_2554" [label="[]", style=dashed]; +"3194 Gather_2554" -> "3196 Unsqueeze_2555" [label="[]", style=dashed]; +"3195 Constant_nncf_2953" -> "3196 Unsqueeze_2555" [label="[1]", style=dashed]; +"3196 Unsqueeze_2555" -> "3197 Concat_2556" [label="[1]", style=dashed]; +"3197 Concat_2556" -> "3198 Reshape_2557" [label="[2]", 
style=dashed]; +"3198 Reshape_2557" -> "3201 Gemm_2558" [label="[]", style=solid]; +"3199 QuantizeLinear_h.11.mlp.c_proj.weight_1" -> "3200 DequantizeLinear_h.11.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"3200 DequantizeLinear_h.11.mlp.c_proj.weight_1" -> "3201 Gemm_2558" [label="[3072, 768]", style=solid]; +"3201 Gemm_2558" -> "3207 Reshape_2562" [label="[]", style=solid]; +"3202 Constant_nncf_2958" -> "3203 Unsqueeze_2559" [label="[1]", style=dashed]; +"3203 Unsqueeze_2559" -> "3206 Concat_2561" [label="[1]", style=dashed]; +"3204 Constant_nncf_2960" -> "3205 Unsqueeze_2560" [label="[1]", style=dashed]; +"3205 Unsqueeze_2560" -> "3206 Concat_2561" [label="[1]", style=dashed]; +"3206 Concat_2561" -> "3207 Reshape_2562" [label="[3]", style=dashed]; +"3207 Reshape_2562" -> "3208 Add_2563" [label="[]", style=solid]; +"3208 Add_2563" -> "3209 ReduceMean_2564" [label="[]", style=solid]; +"3208 Add_2563" -> "3210 Sub_2565" [label="[]", style=solid]; +"3209 ReduceMean_2564" -> "3210 Sub_2565" [label="[]", style=solid]; +"3210 Sub_2565" -> "3212 Pow_2567" [label="[]", style=solid]; +"3210 Sub_2565" -> "3217 Div_2572" [label="[]", style=solid]; +"3211 Constant_2566" -> "3212 Pow_2567" [label="[]", style=solid]; +"3212 Pow_2567" -> "3213 ReduceMean_2568" [label="[]", style=solid]; +"3213 ReduceMean_2568" -> "3215 Add_2570" [label="[]", style=solid]; +"3214 Constant_2569" -> "3215 Add_2570" [label="[]", style=solid]; +"3215 Add_2570" -> "3216 Sqrt_2571" [label="[]", style=solid]; +"3216 Sqrt_2571" -> "3217 Div_2572" [label="[]", style=solid]; +"3217 Div_2572" -> "3218 Mul_2573" [label="[]", style=solid]; +"3218 Mul_2573" -> "3219 Add_2574" [label="[]", style=solid]; +"3219 Add_2574" -> "3229 Reshape_2580" [label="[]", style=solid]; +"3220 Constant_nncf_2976" -> "3221 Unsqueeze_2575" [label="[1]", style=dashed]; +"3221 Unsqueeze_2575" -> "3228 Concat_2579" [label="[1]", style=dashed]; +"3222 Constant_nncf_2978" -> "3223 Unsqueeze_2576" [label="[1]", style=dashed]; +"3223 Unsqueeze_2576" -> "3228 Concat_2579" [label="[1]", style=dashed]; +"3224 Constant_nncf_2980" -> "3225 Unsqueeze_2577" [label="[1]", style=dashed]; +"3225 Unsqueeze_2577" -> "3228 Concat_2579" [label="[1]", style=dashed]; +"3226 Constant_nncf_2982" -> "3227 Unsqueeze_2578" [label="[1]", style=dashed]; +"3227 Unsqueeze_2578" -> "3228 Concat_2579" [label="[1]", style=dashed]; +"3228 Concat_2579" -> "3229 Reshape_2580" [label="[4]", style=dashed]; +"3229 Reshape_2580" -> "3231 nncf_model_output_0" [label="[1, 1, 8, 768]", style=solid]; +"3230 nncf_model_input_0" -> "0 Shape_0" [label="[-1, -1, -1]", style=dashed]; +"3230 nncf_model_input_0" -> "3 Shape_3" [label="[-1, -1, -1]", style=dashed]; +"3230 nncf_model_input_0" -> "6 Shape_6" [label="[-1, -1, -1]", style=dashed]; +"3230 nncf_model_input_0" -> "12 Reshape_11" [label="[-1, -1, -1]", style=dashed]; } diff --git a/tests/onnx/data/reference_graphs/quantization/retinanet-9.dot b/tests/onnx/data/reference_graphs/quantization/retinanet-9.dot index 9d2f66780d5..26364efaeb3 100644 --- a/tests/onnx/data/reference_graphs/quantization/retinanet-9.dot +++ b/tests/onnx/data/reference_graphs/quantization/retinanet-9.dot @@ -837,8 +837,8 @@ strict digraph { "835 Resize_nncf_345" [id=835, type=Resize]; "836 QuantizeLinear_1006_1" [id=836, type=QuantizeLinear]; "837 DequantizeLinear_1006_1" [id=837, type=DequantizeLinear]; -"838 QuantizeLinear_1129_1" [id=838, type=QuantizeLinear]; -"839 DequantizeLinear_1129_1" [id=839, type=DequantizeLinear]; +"838 QuantizeLinear__v_1011_1" 
[id=838, type=QuantizeLinear]; +"839 DequantizeLinear__v_1011_1" [id=839, type=DequantizeLinear]; "840 Add_345" [id=840, type=Add]; "841 QuantizeLinear_backbones.ResNet101FPN.lateral3.weight_1" [id=841, type=QuantizeLinear]; "842 DequantizeLinear_backbones.ResNet101FPN.lateral3.weight_1" [id=842, type=DequantizeLinear]; @@ -849,8 +849,8 @@ strict digraph { "847 Resize_nncf_349" [id=847, type=Resize]; "848 QuantizeLinear_1017_1" [id=848, type=QuantizeLinear]; "849 DequantizeLinear_1017_1" [id=849, type=DequantizeLinear]; -"850 QuantizeLinear_1130_1" [id=850, type=QuantizeLinear]; -"851 DequantizeLinear_1130_1" [id=851, type=DequantizeLinear]; +"850 QuantizeLinear__v_1012_1" [id=850, type=QuantizeLinear]; +"851 DequantizeLinear__v_1012_1" [id=851, type=DequantizeLinear]; "852 Add_348" [id=852, type=Add]; "853 QuantizeLinear_backbones.ResNet101FPN.pyramid6.weight_1" [id=853, type=QuantizeLinear]; "854 DequantizeLinear_backbones.ResNet101FPN.pyramid6.weight_1" [id=854, type=DequantizeLinear]; @@ -1962,11 +1962,11 @@ strict digraph { "833 QuantizeLinear_1005_1" -> "834 DequantizeLinear_1005_1" [label="[1, 256, 15, 20]", style=dashed]; "834 DequantizeLinear_1005_1" -> "835 Resize_nncf_345" [label="[1, 256, 15, 20]", style=solid]; "834 DequantizeLinear_1005_1" -> "876 Conv_354" [label="[1, 256, 15, 20]", style=solid]; -"835 Resize_nncf_345" -> "838 QuantizeLinear_1129_1" [label="[1, 256, 30, 40]", style=solid]; +"835 Resize_nncf_345" -> "838 QuantizeLinear__v_1011_1" [label="[1, 256, 30, 40]", style=solid]; "836 QuantizeLinear_1006_1" -> "837 DequantizeLinear_1006_1" [label="[1, 256, 30, 40]", style=dashed]; "837 DequantizeLinear_1006_1" -> "840 Add_345" [label="[1, 256, 30, 40]", style=solid]; -"838 QuantizeLinear_1129_1" -> "839 DequantizeLinear_1129_1" [label="[1, 256, 30, 40]", style=dashed]; -"839 DequantizeLinear_1129_1" -> "840 Add_345" [label="[1, 256, 30, 40]", style=solid]; +"838 QuantizeLinear__v_1011_1" -> "839 DequantizeLinear__v_1011_1" [label="[1, 256, 30, 40]", style=dashed]; +"839 DequantizeLinear__v_1011_1" -> "840 Add_345" [label="[1, 256, 30, 40]", style=solid]; "840 Add_345" -> "845 QuantizeLinear_1016_1" [label="[1, 256, 30, 40]", style=solid]; "841 QuantizeLinear_backbones.ResNet101FPN.lateral3.weight_1" -> "842 DequantizeLinear_backbones.ResNet101FPN.lateral3.weight_1" [label="[256, 512, 1, 1]", style=dashed]; "842 DequantizeLinear_backbones.ResNet101FPN.lateral3.weight_1" -> "843 Conv_346" [label="[256, 512, 1, 1]", style=solid]; @@ -1975,11 +1975,11 @@ strict digraph { "845 QuantizeLinear_1016_1" -> "846 DequantizeLinear_1016_1" [label="[1, 256, 30, 40]", style=dashed]; "846 DequantizeLinear_1016_1" -> "847 Resize_nncf_349" [label="[1, 256, 30, 40]", style=solid]; "846 DequantizeLinear_1016_1" -> "873 Conv_353" [label="[1, 256, 30, 40]", style=solid]; -"847 Resize_nncf_349" -> "850 QuantizeLinear_1130_1" [label="[1, 256, 60, 80]", style=solid]; +"847 Resize_nncf_349" -> "850 QuantizeLinear__v_1012_1" [label="[1, 256, 60, 80]", style=solid]; "848 QuantizeLinear_1017_1" -> "849 DequantizeLinear_1017_1" [label="[1, 256, 60, 80]", style=dashed]; "849 DequantizeLinear_1017_1" -> "852 Add_348" [label="[1, 256, 60, 80]", style=solid]; -"850 QuantizeLinear_1130_1" -> "851 DequantizeLinear_1130_1" [label="[1, 256, 60, 80]", style=dashed]; -"851 DequantizeLinear_1130_1" -> "852 Add_348" [label="[1, 256, 60, 80]", style=solid]; +"850 QuantizeLinear__v_1012_1" -> "851 DequantizeLinear__v_1012_1" [label="[1, 256, 60, 80]", style=dashed]; +"851 DequantizeLinear__v_1012_1" -> "852 
Add_348" [label="[1, 256, 60, 80]", style=solid]; "852 Add_348" -> "866 QuantizeLinear_1027_1" [label="[1, 256, 60, 80]", style=solid]; "853 QuantizeLinear_backbones.ResNet101FPN.pyramid6.weight_1" -> "854 DequantizeLinear_backbones.ResNet101FPN.pyramid6.weight_1" [label="[256, 2048, 3, 3]", style=dashed]; "854 DequantizeLinear_backbones.ResNet101FPN.pyramid6.weight_1" -> "855 Conv_349" [label="[256, 2048, 3, 3]", style=solid]; diff --git a/tests/onnx/data/reference_graphs/quantization/shufflenet_v2_x1_0.dot b/tests/onnx/data/reference_graphs/quantization/shufflenet_v2_x1_0.dot index 275ea36935a..b1e0c42c485 100644 --- a/tests/onnx/data/reference_graphs/quantization/shufflenet_v2_x1_0.dot +++ b/tests/onnx/data/reference_graphs/quantization/shufflenet_v2_x1_0.dot @@ -665,28 +665,28 @@ strict digraph { "51 /stage2/stage2.1/Constant_4" -> "52 /stage2/stage2.1/Mul" [label="[1]", style=dashed]; "52 /stage2/stage2.1/Mul" -> "53 /stage2/stage2.1/Slice" [label="[1]", style=dashed]; "52 /stage2/stage2.1/Mul" -> "56 /stage2/stage2.1/Slice_1" [label="[1]", style=dashed]; -"53 /stage2/stage2.1/Slice" -> "74 /stage2/stage2.1/Concat" [label="[]", style=solid]; +"53 /stage2/stage2.1/Slice" -> "74 /stage2/stage2.1/Concat" [label="[-1, -1, -1, -1]", style=solid]; "54 /stage2/stage2.1/Constant_5" -> "55 /stage2/stage2.1/Mul_1" [label="[1]", style=dashed]; "55 /stage2/stage2.1/Mul_1" -> "56 /stage2/stage2.1/Slice_1" [label="[1]", style=dashed]; -"56 /stage2/stage2.1/Slice_1" -> "59 /stage2/stage2.1/branch2/branch2.0/Conv" [label="[]", style=solid]; +"56 /stage2/stage2.1/Slice_1" -> "59 /stage2/stage2.1/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; "57 QuantizeLinear_onnx^^Conv_1157_1" -> "58 DequantizeLinear_onnx^^Conv_1157_1" [label="[58, 58, 1, 1]", style=dashed]; "58 DequantizeLinear_onnx^^Conv_1157_1" -> "59 /stage2/stage2.1/branch2/branch2.0/Conv" [label="[58, 58, 1, 1]", style=solid]; -"59 /stage2/stage2.1/branch2/branch2.0/Conv" -> "60 /stage2/stage2.1/branch2/branch2.2/Relu" [label="[]", style=solid]; -"60 /stage2/stage2.1/branch2/branch2.2/Relu" -> "61 QuantizeLinear_/stage2/stage2.1/branch2/branch2.2/Relu_output_0_1" [label="[]", style=solid]; -"61 QuantizeLinear_/stage2/stage2.1/branch2/branch2.2/Relu_output_0_1" -> "62 DequantizeLinear_/stage2/stage2.1/branch2/branch2.2/Relu_output_0_1" [label="[]", style=dashed]; -"62 DequantizeLinear_/stage2/stage2.1/branch2/branch2.2/Relu_output_0_1" -> "65 /stage2/stage2.1/branch2/branch2.3/Conv" [label="[]", style=solid]; +"59 /stage2/stage2.1/branch2/branch2.0/Conv" -> "60 /stage2/stage2.1/branch2/branch2.2/Relu" [label="[-1, 58, -1, -1]", style=solid]; +"60 /stage2/stage2.1/branch2/branch2.2/Relu" -> "61 QuantizeLinear_/stage2/stage2.1/branch2/branch2.2/Relu_output_0_1" [label="[-1, 58, -1, -1]", style=solid]; +"61 QuantizeLinear_/stage2/stage2.1/branch2/branch2.2/Relu_output_0_1" -> "62 DequantizeLinear_/stage2/stage2.1/branch2/branch2.2/Relu_output_0_1" [label="[-1, 58, -1, -1]", style=dashed]; +"62 DequantizeLinear_/stage2/stage2.1/branch2/branch2.2/Relu_output_0_1" -> "65 /stage2/stage2.1/branch2/branch2.3/Conv" [label="[-1, 58, -1, -1]", style=solid]; "63 QuantizeLinear_onnx^^Conv_1160_1" -> "64 DequantizeLinear_onnx^^Conv_1160_1" [label="[58, 1, 3, 3]", style=dashed]; "64 DequantizeLinear_onnx^^Conv_1160_1" -> "65 /stage2/stage2.1/branch2/branch2.3/Conv" [label="[58, 1, 3, 3]", style=solid]; -"65 /stage2/stage2.1/branch2/branch2.3/Conv" -> "66 QuantizeLinear_/stage2/stage2.1/branch2/branch2.3/Conv_output_0_1" [label="[]", 
style=solid]; -"66 QuantizeLinear_/stage2/stage2.1/branch2/branch2.3/Conv_output_0_1" -> "67 DequantizeLinear_/stage2/stage2.1/branch2/branch2.3/Conv_output_0_1" [label="[]", style=dashed]; -"67 DequantizeLinear_/stage2/stage2.1/branch2/branch2.3/Conv_output_0_1" -> "70 /stage2/stage2.1/branch2/branch2.5/Conv" [label="[]", style=solid]; +"65 /stage2/stage2.1/branch2/branch2.3/Conv" -> "66 QuantizeLinear_/stage2/stage2.1/branch2/branch2.3/Conv_output_0_1" [label="[-1, 58, -1, -1]", style=solid]; +"66 QuantizeLinear_/stage2/stage2.1/branch2/branch2.3/Conv_output_0_1" -> "67 DequantizeLinear_/stage2/stage2.1/branch2/branch2.3/Conv_output_0_1" [label="[-1, 58, -1, -1]", style=dashed]; +"67 DequantizeLinear_/stage2/stage2.1/branch2/branch2.3/Conv_output_0_1" -> "70 /stage2/stage2.1/branch2/branch2.5/Conv" [label="[-1, 58, -1, -1]", style=solid]; "68 QuantizeLinear_onnx^^Conv_1163_1" -> "69 DequantizeLinear_onnx^^Conv_1163_1" [label="[58, 58, 1, 1]", style=dashed]; "69 DequantizeLinear_onnx^^Conv_1163_1" -> "70 /stage2/stage2.1/branch2/branch2.5/Conv" [label="[58, 58, 1, 1]", style=solid]; -"70 /stage2/stage2.1/branch2/branch2.5/Conv" -> "71 /stage2/stage2.1/branch2/branch2.7/Relu" [label="[]", style=solid]; -"71 /stage2/stage2.1/branch2/branch2.7/Relu" -> "72 QuantizeLinear_/stage2/stage2.1/branch2/branch2.7/Relu_output_0_1" [label="[]", style=solid]; -"72 QuantizeLinear_/stage2/stage2.1/branch2/branch2.7/Relu_output_0_1" -> "73 DequantizeLinear_/stage2/stage2.1/branch2/branch2.7/Relu_output_0_1" [label="[]", style=dashed]; -"73 DequantizeLinear_/stage2/stage2.1/branch2/branch2.7/Relu_output_0_1" -> "74 /stage2/stage2.1/Concat" [label="[]", style=solid]; -"74 /stage2/stage2.1/Concat" -> "76 /stage2/stage2.1/Reshape" [label="[]", style=solid]; +"70 /stage2/stage2.1/branch2/branch2.5/Conv" -> "71 /stage2/stage2.1/branch2/branch2.7/Relu" [label="[-1, 58, -1, -1]", style=solid]; +"71 /stage2/stage2.1/branch2/branch2.7/Relu" -> "72 QuantizeLinear_/stage2/stage2.1/branch2/branch2.7/Relu_output_0_1" [label="[-1, 58, -1, -1]", style=solid]; +"72 QuantizeLinear_/stage2/stage2.1/branch2/branch2.7/Relu_output_0_1" -> "73 DequantizeLinear_/stage2/stage2.1/branch2/branch2.7/Relu_output_0_1" [label="[-1, 58, -1, -1]", style=dashed]; +"73 DequantizeLinear_/stage2/stage2.1/branch2/branch2.7/Relu_output_0_1" -> "74 /stage2/stage2.1/Concat" [label="[-1, 58, -1, -1]", style=solid]; +"74 /stage2/stage2.1/Concat" -> "76 /stage2/stage2.1/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "75 /stage2/stage2.1/Constant_6" -> "76 /stage2/stage2.1/Reshape" [label="[5]", style=dashed]; "76 /stage2/stage2.1/Reshape" -> "77 /stage2/stage2.1/Transpose" [label="[1, 2, 58, 28, 28]", style=solid]; "77 /stage2/stage2.1/Transpose" -> "79 /stage2/stage2.1/Reshape_1" [label="[1, 58, 2, 28, 28]", style=solid]; @@ -708,28 +708,28 @@ strict digraph { "88 /stage2/stage2.2/Constant_4" -> "89 /stage2/stage2.2/Mul" [label="[1]", style=dashed]; "89 /stage2/stage2.2/Mul" -> "90 /stage2/stage2.2/Slice" [label="[1]", style=dashed]; "89 /stage2/stage2.2/Mul" -> "93 /stage2/stage2.2/Slice_1" [label="[1]", style=dashed]; -"90 /stage2/stage2.2/Slice" -> "111 /stage2/stage2.2/Concat" [label="[]", style=solid]; +"90 /stage2/stage2.2/Slice" -> "111 /stage2/stage2.2/Concat" [label="[-1, -1, -1, -1]", style=solid]; "91 /stage2/stage2.2/Constant_5" -> "92 /stage2/stage2.2/Mul_1" [label="[1]", style=dashed]; "92 /stage2/stage2.2/Mul_1" -> "93 /stage2/stage2.2/Slice_1" [label="[1]", style=dashed]; -"93 /stage2/stage2.2/Slice_1" -> "96 
/stage2/stage2.2/branch2/branch2.0/Conv" [label="[]", style=solid]; +"93 /stage2/stage2.2/Slice_1" -> "96 /stage2/stage2.2/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; "94 QuantizeLinear_onnx^^Conv_1166_1" -> "95 DequantizeLinear_onnx^^Conv_1166_1" [label="[58, 58, 1, 1]", style=dashed]; "95 DequantizeLinear_onnx^^Conv_1166_1" -> "96 /stage2/stage2.2/branch2/branch2.0/Conv" [label="[58, 58, 1, 1]", style=solid]; -"96 /stage2/stage2.2/branch2/branch2.0/Conv" -> "97 /stage2/stage2.2/branch2/branch2.2/Relu" [label="[]", style=solid]; -"97 /stage2/stage2.2/branch2/branch2.2/Relu" -> "98 QuantizeLinear_/stage2/stage2.2/branch2/branch2.2/Relu_output_0_1" [label="[]", style=solid]; -"98 QuantizeLinear_/stage2/stage2.2/branch2/branch2.2/Relu_output_0_1" -> "99 DequantizeLinear_/stage2/stage2.2/branch2/branch2.2/Relu_output_0_1" [label="[]", style=dashed]; -"99 DequantizeLinear_/stage2/stage2.2/branch2/branch2.2/Relu_output_0_1" -> "102 /stage2/stage2.2/branch2/branch2.3/Conv" [label="[]", style=solid]; +"96 /stage2/stage2.2/branch2/branch2.0/Conv" -> "97 /stage2/stage2.2/branch2/branch2.2/Relu" [label="[-1, 58, -1, -1]", style=solid]; +"97 /stage2/stage2.2/branch2/branch2.2/Relu" -> "98 QuantizeLinear_/stage2/stage2.2/branch2/branch2.2/Relu_output_0_1" [label="[-1, 58, -1, -1]", style=solid]; +"98 QuantizeLinear_/stage2/stage2.2/branch2/branch2.2/Relu_output_0_1" -> "99 DequantizeLinear_/stage2/stage2.2/branch2/branch2.2/Relu_output_0_1" [label="[-1, 58, -1, -1]", style=dashed]; +"99 DequantizeLinear_/stage2/stage2.2/branch2/branch2.2/Relu_output_0_1" -> "102 /stage2/stage2.2/branch2/branch2.3/Conv" [label="[-1, 58, -1, -1]", style=solid]; "100 QuantizeLinear_onnx^^Conv_1169_1" -> "101 DequantizeLinear_onnx^^Conv_1169_1" [label="[58, 1, 3, 3]", style=dashed]; "101 DequantizeLinear_onnx^^Conv_1169_1" -> "102 /stage2/stage2.2/branch2/branch2.3/Conv" [label="[58, 1, 3, 3]", style=solid]; -"102 /stage2/stage2.2/branch2/branch2.3/Conv" -> "103 QuantizeLinear_/stage2/stage2.2/branch2/branch2.3/Conv_output_0_1" [label="[]", style=solid]; -"103 QuantizeLinear_/stage2/stage2.2/branch2/branch2.3/Conv_output_0_1" -> "104 DequantizeLinear_/stage2/stage2.2/branch2/branch2.3/Conv_output_0_1" [label="[]", style=dashed]; -"104 DequantizeLinear_/stage2/stage2.2/branch2/branch2.3/Conv_output_0_1" -> "107 /stage2/stage2.2/branch2/branch2.5/Conv" [label="[]", style=solid]; +"102 /stage2/stage2.2/branch2/branch2.3/Conv" -> "103 QuantizeLinear_/stage2/stage2.2/branch2/branch2.3/Conv_output_0_1" [label="[-1, 58, -1, -1]", style=solid]; +"103 QuantizeLinear_/stage2/stage2.2/branch2/branch2.3/Conv_output_0_1" -> "104 DequantizeLinear_/stage2/stage2.2/branch2/branch2.3/Conv_output_0_1" [label="[-1, 58, -1, -1]", style=dashed]; +"104 DequantizeLinear_/stage2/stage2.2/branch2/branch2.3/Conv_output_0_1" -> "107 /stage2/stage2.2/branch2/branch2.5/Conv" [label="[-1, 58, -1, -1]", style=solid]; "105 QuantizeLinear_onnx^^Conv_1172_1" -> "106 DequantizeLinear_onnx^^Conv_1172_1" [label="[58, 58, 1, 1]", style=dashed]; "106 DequantizeLinear_onnx^^Conv_1172_1" -> "107 /stage2/stage2.2/branch2/branch2.5/Conv" [label="[58, 58, 1, 1]", style=solid]; -"107 /stage2/stage2.2/branch2/branch2.5/Conv" -> "108 /stage2/stage2.2/branch2/branch2.7/Relu" [label="[]", style=solid]; -"108 /stage2/stage2.2/branch2/branch2.7/Relu" -> "109 QuantizeLinear_/stage2/stage2.2/branch2/branch2.7/Relu_output_0_1" [label="[]", style=solid]; -"109 QuantizeLinear_/stage2/stage2.2/branch2/branch2.7/Relu_output_0_1" -> "110 
DequantizeLinear_/stage2/stage2.2/branch2/branch2.7/Relu_output_0_1" [label="[]", style=dashed]; -"110 DequantizeLinear_/stage2/stage2.2/branch2/branch2.7/Relu_output_0_1" -> "111 /stage2/stage2.2/Concat" [label="[]", style=solid]; -"111 /stage2/stage2.2/Concat" -> "113 /stage2/stage2.2/Reshape" [label="[]", style=solid]; +"107 /stage2/stage2.2/branch2/branch2.5/Conv" -> "108 /stage2/stage2.2/branch2/branch2.7/Relu" [label="[-1, 58, -1, -1]", style=solid]; +"108 /stage2/stage2.2/branch2/branch2.7/Relu" -> "109 QuantizeLinear_/stage2/stage2.2/branch2/branch2.7/Relu_output_0_1" [label="[-1, 58, -1, -1]", style=solid]; +"109 QuantizeLinear_/stage2/stage2.2/branch2/branch2.7/Relu_output_0_1" -> "110 DequantizeLinear_/stage2/stage2.2/branch2/branch2.7/Relu_output_0_1" [label="[-1, 58, -1, -1]", style=dashed]; +"110 DequantizeLinear_/stage2/stage2.2/branch2/branch2.7/Relu_output_0_1" -> "111 /stage2/stage2.2/Concat" [label="[-1, 58, -1, -1]", style=solid]; +"111 /stage2/stage2.2/Concat" -> "113 /stage2/stage2.2/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "112 /stage2/stage2.2/Constant_6" -> "113 /stage2/stage2.2/Reshape" [label="[5]", style=dashed]; "113 /stage2/stage2.2/Reshape" -> "114 /stage2/stage2.2/Transpose" [label="[1, 2, 58, 28, 28]", style=solid]; "114 /stage2/stage2.2/Transpose" -> "116 /stage2/stage2.2/Reshape_1" [label="[1, 58, 2, 28, 28]", style=solid]; @@ -751,28 +751,28 @@ strict digraph { "125 /stage2/stage2.3/Constant_4" -> "126 /stage2/stage2.3/Mul" [label="[1]", style=dashed]; "126 /stage2/stage2.3/Mul" -> "127 /stage2/stage2.3/Slice" [label="[1]", style=dashed]; "126 /stage2/stage2.3/Mul" -> "130 /stage2/stage2.3/Slice_1" [label="[1]", style=dashed]; -"127 /stage2/stage2.3/Slice" -> "148 /stage2/stage2.3/Concat" [label="[]", style=solid]; +"127 /stage2/stage2.3/Slice" -> "148 /stage2/stage2.3/Concat" [label="[-1, -1, -1, -1]", style=solid]; "128 /stage2/stage2.3/Constant_5" -> "129 /stage2/stage2.3/Mul_1" [label="[1]", style=dashed]; "129 /stage2/stage2.3/Mul_1" -> "130 /stage2/stage2.3/Slice_1" [label="[1]", style=dashed]; -"130 /stage2/stage2.3/Slice_1" -> "133 /stage2/stage2.3/branch2/branch2.0/Conv" [label="[]", style=solid]; +"130 /stage2/stage2.3/Slice_1" -> "133 /stage2/stage2.3/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; "131 QuantizeLinear_onnx^^Conv_1175_1" -> "132 DequantizeLinear_onnx^^Conv_1175_1" [label="[58, 58, 1, 1]", style=dashed]; "132 DequantizeLinear_onnx^^Conv_1175_1" -> "133 /stage2/stage2.3/branch2/branch2.0/Conv" [label="[58, 58, 1, 1]", style=solid]; -"133 /stage2/stage2.3/branch2/branch2.0/Conv" -> "134 /stage2/stage2.3/branch2/branch2.2/Relu" [label="[]", style=solid]; -"134 /stage2/stage2.3/branch2/branch2.2/Relu" -> "135 QuantizeLinear_/stage2/stage2.3/branch2/branch2.2/Relu_output_0_1" [label="[]", style=solid]; -"135 QuantizeLinear_/stage2/stage2.3/branch2/branch2.2/Relu_output_0_1" -> "136 DequantizeLinear_/stage2/stage2.3/branch2/branch2.2/Relu_output_0_1" [label="[]", style=dashed]; -"136 DequantizeLinear_/stage2/stage2.3/branch2/branch2.2/Relu_output_0_1" -> "139 /stage2/stage2.3/branch2/branch2.3/Conv" [label="[]", style=solid]; +"133 /stage2/stage2.3/branch2/branch2.0/Conv" -> "134 /stage2/stage2.3/branch2/branch2.2/Relu" [label="[-1, 58, -1, -1]", style=solid]; +"134 /stage2/stage2.3/branch2/branch2.2/Relu" -> "135 QuantizeLinear_/stage2/stage2.3/branch2/branch2.2/Relu_output_0_1" [label="[-1, 58, -1, -1]", style=solid]; +"135 QuantizeLinear_/stage2/stage2.3/branch2/branch2.2/Relu_output_0_1" -> "136 
DequantizeLinear_/stage2/stage2.3/branch2/branch2.2/Relu_output_0_1" [label="[-1, 58, -1, -1]", style=dashed]; +"136 DequantizeLinear_/stage2/stage2.3/branch2/branch2.2/Relu_output_0_1" -> "139 /stage2/stage2.3/branch2/branch2.3/Conv" [label="[-1, 58, -1, -1]", style=solid]; "137 QuantizeLinear_onnx^^Conv_1178_1" -> "138 DequantizeLinear_onnx^^Conv_1178_1" [label="[58, 1, 3, 3]", style=dashed]; "138 DequantizeLinear_onnx^^Conv_1178_1" -> "139 /stage2/stage2.3/branch2/branch2.3/Conv" [label="[58, 1, 3, 3]", style=solid]; -"139 /stage2/stage2.3/branch2/branch2.3/Conv" -> "140 QuantizeLinear_/stage2/stage2.3/branch2/branch2.3/Conv_output_0_1" [label="[]", style=solid]; -"140 QuantizeLinear_/stage2/stage2.3/branch2/branch2.3/Conv_output_0_1" -> "141 DequantizeLinear_/stage2/stage2.3/branch2/branch2.3/Conv_output_0_1" [label="[]", style=dashed]; -"141 DequantizeLinear_/stage2/stage2.3/branch2/branch2.3/Conv_output_0_1" -> "144 /stage2/stage2.3/branch2/branch2.5/Conv" [label="[]", style=solid]; +"139 /stage2/stage2.3/branch2/branch2.3/Conv" -> "140 QuantizeLinear_/stage2/stage2.3/branch2/branch2.3/Conv_output_0_1" [label="[-1, 58, -1, -1]", style=solid]; +"140 QuantizeLinear_/stage2/stage2.3/branch2/branch2.3/Conv_output_0_1" -> "141 DequantizeLinear_/stage2/stage2.3/branch2/branch2.3/Conv_output_0_1" [label="[-1, 58, -1, -1]", style=dashed]; +"141 DequantizeLinear_/stage2/stage2.3/branch2/branch2.3/Conv_output_0_1" -> "144 /stage2/stage2.3/branch2/branch2.5/Conv" [label="[-1, 58, -1, -1]", style=solid]; "142 QuantizeLinear_onnx^^Conv_1181_1" -> "143 DequantizeLinear_onnx^^Conv_1181_1" [label="[58, 58, 1, 1]", style=dashed]; "143 DequantizeLinear_onnx^^Conv_1181_1" -> "144 /stage2/stage2.3/branch2/branch2.5/Conv" [label="[58, 58, 1, 1]", style=solid]; -"144 /stage2/stage2.3/branch2/branch2.5/Conv" -> "145 /stage2/stage2.3/branch2/branch2.7/Relu" [label="[]", style=solid]; -"145 /stage2/stage2.3/branch2/branch2.7/Relu" -> "146 QuantizeLinear_/stage2/stage2.3/branch2/branch2.7/Relu_output_0_1" [label="[]", style=solid]; -"146 QuantizeLinear_/stage2/stage2.3/branch2/branch2.7/Relu_output_0_1" -> "147 DequantizeLinear_/stage2/stage2.3/branch2/branch2.7/Relu_output_0_1" [label="[]", style=dashed]; -"147 DequantizeLinear_/stage2/stage2.3/branch2/branch2.7/Relu_output_0_1" -> "148 /stage2/stage2.3/Concat" [label="[]", style=solid]; -"148 /stage2/stage2.3/Concat" -> "150 /stage2/stage2.3/Reshape" [label="[]", style=solid]; +"144 /stage2/stage2.3/branch2/branch2.5/Conv" -> "145 /stage2/stage2.3/branch2/branch2.7/Relu" [label="[-1, 58, -1, -1]", style=solid]; +"145 /stage2/stage2.3/branch2/branch2.7/Relu" -> "146 QuantizeLinear_/stage2/stage2.3/branch2/branch2.7/Relu_output_0_1" [label="[-1, 58, -1, -1]", style=solid]; +"146 QuantizeLinear_/stage2/stage2.3/branch2/branch2.7/Relu_output_0_1" -> "147 DequantizeLinear_/stage2/stage2.3/branch2/branch2.7/Relu_output_0_1" [label="[-1, 58, -1, -1]", style=dashed]; +"147 DequantizeLinear_/stage2/stage2.3/branch2/branch2.7/Relu_output_0_1" -> "148 /stage2/stage2.3/Concat" [label="[-1, 58, -1, -1]", style=solid]; +"148 /stage2/stage2.3/Concat" -> "150 /stage2/stage2.3/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "149 /stage2/stage2.3/Constant_6" -> "150 /stage2/stage2.3/Reshape" [label="[5]", style=dashed]; "150 /stage2/stage2.3/Reshape" -> "151 /stage2/stage2.3/Transpose" [label="[1, 2, 58, 28, 28]", style=solid]; "151 /stage2/stage2.3/Transpose" -> "153 /stage2/stage2.3/Reshape_1" [label="[1, 58, 2, 28, 28]", style=solid]; @@ -829,28 +829,28 @@ strict 
digraph { "196 /stage3/stage3.1/Constant_4" -> "197 /stage3/stage3.1/Mul" [label="[1]", style=dashed]; "197 /stage3/stage3.1/Mul" -> "198 /stage3/stage3.1/Slice" [label="[1]", style=dashed]; "197 /stage3/stage3.1/Mul" -> "201 /stage3/stage3.1/Slice_1" [label="[1]", style=dashed]; -"198 /stage3/stage3.1/Slice" -> "219 /stage3/stage3.1/Concat" [label="[]", style=solid]; +"198 /stage3/stage3.1/Slice" -> "219 /stage3/stage3.1/Concat" [label="[-1, -1, -1, -1]", style=solid]; "199 /stage3/stage3.1/Constant_5" -> "200 /stage3/stage3.1/Mul_1" [label="[1]", style=dashed]; "200 /stage3/stage3.1/Mul_1" -> "201 /stage3/stage3.1/Slice_1" [label="[1]", style=dashed]; -"201 /stage3/stage3.1/Slice_1" -> "204 /stage3/stage3.1/branch2/branch2.0/Conv" [label="[]", style=solid]; +"201 /stage3/stage3.1/Slice_1" -> "204 /stage3/stage3.1/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; "202 QuantizeLinear_onnx^^Conv_1199_1" -> "203 DequantizeLinear_onnx^^Conv_1199_1" [label="[116, 116, 1, 1]", style=dashed]; "203 DequantizeLinear_onnx^^Conv_1199_1" -> "204 /stage3/stage3.1/branch2/branch2.0/Conv" [label="[116, 116, 1, 1]", style=solid]; -"204 /stage3/stage3.1/branch2/branch2.0/Conv" -> "205 /stage3/stage3.1/branch2/branch2.2/Relu" [label="[]", style=solid]; -"205 /stage3/stage3.1/branch2/branch2.2/Relu" -> "206 QuantizeLinear_/stage3/stage3.1/branch2/branch2.2/Relu_output_0_1" [label="[]", style=solid]; -"206 QuantizeLinear_/stage3/stage3.1/branch2/branch2.2/Relu_output_0_1" -> "207 DequantizeLinear_/stage3/stage3.1/branch2/branch2.2/Relu_output_0_1" [label="[]", style=dashed]; -"207 DequantizeLinear_/stage3/stage3.1/branch2/branch2.2/Relu_output_0_1" -> "210 /stage3/stage3.1/branch2/branch2.3/Conv" [label="[]", style=solid]; +"204 /stage3/stage3.1/branch2/branch2.0/Conv" -> "205 /stage3/stage3.1/branch2/branch2.2/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"205 /stage3/stage3.1/branch2/branch2.2/Relu" -> "206 QuantizeLinear_/stage3/stage3.1/branch2/branch2.2/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"206 QuantizeLinear_/stage3/stage3.1/branch2/branch2.2/Relu_output_0_1" -> "207 DequantizeLinear_/stage3/stage3.1/branch2/branch2.2/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"207 DequantizeLinear_/stage3/stage3.1/branch2/branch2.2/Relu_output_0_1" -> "210 /stage3/stage3.1/branch2/branch2.3/Conv" [label="[-1, 116, -1, -1]", style=solid]; "208 QuantizeLinear_onnx^^Conv_1202_1" -> "209 DequantizeLinear_onnx^^Conv_1202_1" [label="[116, 1, 3, 3]", style=dashed]; "209 DequantizeLinear_onnx^^Conv_1202_1" -> "210 /stage3/stage3.1/branch2/branch2.3/Conv" [label="[116, 1, 3, 3]", style=solid]; -"210 /stage3/stage3.1/branch2/branch2.3/Conv" -> "211 QuantizeLinear_/stage3/stage3.1/branch2/branch2.3/Conv_output_0_1" [label="[]", style=solid]; -"211 QuantizeLinear_/stage3/stage3.1/branch2/branch2.3/Conv_output_0_1" -> "212 DequantizeLinear_/stage3/stage3.1/branch2/branch2.3/Conv_output_0_1" [label="[]", style=dashed]; -"212 DequantizeLinear_/stage3/stage3.1/branch2/branch2.3/Conv_output_0_1" -> "215 /stage3/stage3.1/branch2/branch2.5/Conv" [label="[]", style=solid]; +"210 /stage3/stage3.1/branch2/branch2.3/Conv" -> "211 QuantizeLinear_/stage3/stage3.1/branch2/branch2.3/Conv_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"211 QuantizeLinear_/stage3/stage3.1/branch2/branch2.3/Conv_output_0_1" -> "212 DequantizeLinear_/stage3/stage3.1/branch2/branch2.3/Conv_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"212 
DequantizeLinear_/stage3/stage3.1/branch2/branch2.3/Conv_output_0_1" -> "215 /stage3/stage3.1/branch2/branch2.5/Conv" [label="[-1, 116, -1, -1]", style=solid]; "213 QuantizeLinear_onnx^^Conv_1205_1" -> "214 DequantizeLinear_onnx^^Conv_1205_1" [label="[116, 116, 1, 1]", style=dashed]; "214 DequantizeLinear_onnx^^Conv_1205_1" -> "215 /stage3/stage3.1/branch2/branch2.5/Conv" [label="[116, 116, 1, 1]", style=solid]; -"215 /stage3/stage3.1/branch2/branch2.5/Conv" -> "216 /stage3/stage3.1/branch2/branch2.7/Relu" [label="[]", style=solid]; -"216 /stage3/stage3.1/branch2/branch2.7/Relu" -> "217 QuantizeLinear_/stage3/stage3.1/branch2/branch2.7/Relu_output_0_1" [label="[]", style=solid]; -"217 QuantizeLinear_/stage3/stage3.1/branch2/branch2.7/Relu_output_0_1" -> "218 DequantizeLinear_/stage3/stage3.1/branch2/branch2.7/Relu_output_0_1" [label="[]", style=dashed]; -"218 DequantizeLinear_/stage3/stage3.1/branch2/branch2.7/Relu_output_0_1" -> "219 /stage3/stage3.1/Concat" [label="[]", style=solid]; -"219 /stage3/stage3.1/Concat" -> "221 /stage3/stage3.1/Reshape" [label="[]", style=solid]; +"215 /stage3/stage3.1/branch2/branch2.5/Conv" -> "216 /stage3/stage3.1/branch2/branch2.7/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"216 /stage3/stage3.1/branch2/branch2.7/Relu" -> "217 QuantizeLinear_/stage3/stage3.1/branch2/branch2.7/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"217 QuantizeLinear_/stage3/stage3.1/branch2/branch2.7/Relu_output_0_1" -> "218 DequantizeLinear_/stage3/stage3.1/branch2/branch2.7/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"218 DequantizeLinear_/stage3/stage3.1/branch2/branch2.7/Relu_output_0_1" -> "219 /stage3/stage3.1/Concat" [label="[-1, 116, -1, -1]", style=solid]; +"219 /stage3/stage3.1/Concat" -> "221 /stage3/stage3.1/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "220 /stage3/stage3.1/Constant_6" -> "221 /stage3/stage3.1/Reshape" [label="[5]", style=dashed]; "221 /stage3/stage3.1/Reshape" -> "222 /stage3/stage3.1/Transpose" [label="[1, 2, 116, 14, 14]", style=solid]; "222 /stage3/stage3.1/Transpose" -> "224 /stage3/stage3.1/Reshape_1" [label="[1, 116, 2, 14, 14]", style=solid]; @@ -872,28 +872,28 @@ strict digraph { "233 /stage3/stage3.2/Constant_4" -> "234 /stage3/stage3.2/Mul" [label="[1]", style=dashed]; "234 /stage3/stage3.2/Mul" -> "235 /stage3/stage3.2/Slice" [label="[1]", style=dashed]; "234 /stage3/stage3.2/Mul" -> "238 /stage3/stage3.2/Slice_1" [label="[1]", style=dashed]; -"235 /stage3/stage3.2/Slice" -> "256 /stage3/stage3.2/Concat" [label="[]", style=solid]; +"235 /stage3/stage3.2/Slice" -> "256 /stage3/stage3.2/Concat" [label="[-1, -1, -1, -1]", style=solid]; "236 /stage3/stage3.2/Constant_5" -> "237 /stage3/stage3.2/Mul_1" [label="[1]", style=dashed]; "237 /stage3/stage3.2/Mul_1" -> "238 /stage3/stage3.2/Slice_1" [label="[1]", style=dashed]; -"238 /stage3/stage3.2/Slice_1" -> "241 /stage3/stage3.2/branch2/branch2.0/Conv" [label="[]", style=solid]; +"238 /stage3/stage3.2/Slice_1" -> "241 /stage3/stage3.2/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; "239 QuantizeLinear_onnx^^Conv_1208_1" -> "240 DequantizeLinear_onnx^^Conv_1208_1" [label="[116, 116, 1, 1]", style=dashed]; "240 DequantizeLinear_onnx^^Conv_1208_1" -> "241 /stage3/stage3.2/branch2/branch2.0/Conv" [label="[116, 116, 1, 1]", style=solid]; -"241 /stage3/stage3.2/branch2/branch2.0/Conv" -> "242 /stage3/stage3.2/branch2/branch2.2/Relu" [label="[]", style=solid]; -"242 /stage3/stage3.2/branch2/branch2.2/Relu" -> "243 
QuantizeLinear_/stage3/stage3.2/branch2/branch2.2/Relu_output_0_1" [label="[]", style=solid]; -"243 QuantizeLinear_/stage3/stage3.2/branch2/branch2.2/Relu_output_0_1" -> "244 DequantizeLinear_/stage3/stage3.2/branch2/branch2.2/Relu_output_0_1" [label="[]", style=dashed]; -"244 DequantizeLinear_/stage3/stage3.2/branch2/branch2.2/Relu_output_0_1" -> "247 /stage3/stage3.2/branch2/branch2.3/Conv" [label="[]", style=solid]; +"241 /stage3/stage3.2/branch2/branch2.0/Conv" -> "242 /stage3/stage3.2/branch2/branch2.2/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"242 /stage3/stage3.2/branch2/branch2.2/Relu" -> "243 QuantizeLinear_/stage3/stage3.2/branch2/branch2.2/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"243 QuantizeLinear_/stage3/stage3.2/branch2/branch2.2/Relu_output_0_1" -> "244 DequantizeLinear_/stage3/stage3.2/branch2/branch2.2/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"244 DequantizeLinear_/stage3/stage3.2/branch2/branch2.2/Relu_output_0_1" -> "247 /stage3/stage3.2/branch2/branch2.3/Conv" [label="[-1, 116, -1, -1]", style=solid]; "245 QuantizeLinear_onnx^^Conv_1211_1" -> "246 DequantizeLinear_onnx^^Conv_1211_1" [label="[116, 1, 3, 3]", style=dashed]; "246 DequantizeLinear_onnx^^Conv_1211_1" -> "247 /stage3/stage3.2/branch2/branch2.3/Conv" [label="[116, 1, 3, 3]", style=solid]; -"247 /stage3/stage3.2/branch2/branch2.3/Conv" -> "248 QuantizeLinear_/stage3/stage3.2/branch2/branch2.3/Conv_output_0_1" [label="[]", style=solid]; -"248 QuantizeLinear_/stage3/stage3.2/branch2/branch2.3/Conv_output_0_1" -> "249 DequantizeLinear_/stage3/stage3.2/branch2/branch2.3/Conv_output_0_1" [label="[]", style=dashed]; -"249 DequantizeLinear_/stage3/stage3.2/branch2/branch2.3/Conv_output_0_1" -> "252 /stage3/stage3.2/branch2/branch2.5/Conv" [label="[]", style=solid]; +"247 /stage3/stage3.2/branch2/branch2.3/Conv" -> "248 QuantizeLinear_/stage3/stage3.2/branch2/branch2.3/Conv_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"248 QuantizeLinear_/stage3/stage3.2/branch2/branch2.3/Conv_output_0_1" -> "249 DequantizeLinear_/stage3/stage3.2/branch2/branch2.3/Conv_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"249 DequantizeLinear_/stage3/stage3.2/branch2/branch2.3/Conv_output_0_1" -> "252 /stage3/stage3.2/branch2/branch2.5/Conv" [label="[-1, 116, -1, -1]", style=solid]; "250 QuantizeLinear_onnx^^Conv_1214_1" -> "251 DequantizeLinear_onnx^^Conv_1214_1" [label="[116, 116, 1, 1]", style=dashed]; "251 DequantizeLinear_onnx^^Conv_1214_1" -> "252 /stage3/stage3.2/branch2/branch2.5/Conv" [label="[116, 116, 1, 1]", style=solid]; -"252 /stage3/stage3.2/branch2/branch2.5/Conv" -> "253 /stage3/stage3.2/branch2/branch2.7/Relu" [label="[]", style=solid]; -"253 /stage3/stage3.2/branch2/branch2.7/Relu" -> "254 QuantizeLinear_/stage3/stage3.2/branch2/branch2.7/Relu_output_0_1" [label="[]", style=solid]; -"254 QuantizeLinear_/stage3/stage3.2/branch2/branch2.7/Relu_output_0_1" -> "255 DequantizeLinear_/stage3/stage3.2/branch2/branch2.7/Relu_output_0_1" [label="[]", style=dashed]; -"255 DequantizeLinear_/stage3/stage3.2/branch2/branch2.7/Relu_output_0_1" -> "256 /stage3/stage3.2/Concat" [label="[]", style=solid]; -"256 /stage3/stage3.2/Concat" -> "258 /stage3/stage3.2/Reshape" [label="[]", style=solid]; +"252 /stage3/stage3.2/branch2/branch2.5/Conv" -> "253 /stage3/stage3.2/branch2/branch2.7/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"253 /stage3/stage3.2/branch2/branch2.7/Relu" -> "254 QuantizeLinear_/stage3/stage3.2/branch2/branch2.7/Relu_output_0_1" [label="[-1, 116, -1, 
-1]", style=solid]; +"254 QuantizeLinear_/stage3/stage3.2/branch2/branch2.7/Relu_output_0_1" -> "255 DequantizeLinear_/stage3/stage3.2/branch2/branch2.7/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"255 DequantizeLinear_/stage3/stage3.2/branch2/branch2.7/Relu_output_0_1" -> "256 /stage3/stage3.2/Concat" [label="[-1, 116, -1, -1]", style=solid]; +"256 /stage3/stage3.2/Concat" -> "258 /stage3/stage3.2/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "257 /stage3/stage3.2/Constant_6" -> "258 /stage3/stage3.2/Reshape" [label="[5]", style=dashed]; "258 /stage3/stage3.2/Reshape" -> "259 /stage3/stage3.2/Transpose" [label="[1, 2, 116, 14, 14]", style=solid]; "259 /stage3/stage3.2/Transpose" -> "261 /stage3/stage3.2/Reshape_1" [label="[1, 116, 2, 14, 14]", style=solid]; @@ -915,28 +915,28 @@ strict digraph { "270 /stage3/stage3.3/Constant_4" -> "271 /stage3/stage3.3/Mul" [label="[1]", style=dashed]; "271 /stage3/stage3.3/Mul" -> "272 /stage3/stage3.3/Slice" [label="[1]", style=dashed]; "271 /stage3/stage3.3/Mul" -> "275 /stage3/stage3.3/Slice_1" [label="[1]", style=dashed]; -"272 /stage3/stage3.3/Slice" -> "293 /stage3/stage3.3/Concat" [label="[]", style=solid]; +"272 /stage3/stage3.3/Slice" -> "293 /stage3/stage3.3/Concat" [label="[-1, -1, -1, -1]", style=solid]; "273 /stage3/stage3.3/Constant_5" -> "274 /stage3/stage3.3/Mul_1" [label="[1]", style=dashed]; "274 /stage3/stage3.3/Mul_1" -> "275 /stage3/stage3.3/Slice_1" [label="[1]", style=dashed]; -"275 /stage3/stage3.3/Slice_1" -> "278 /stage3/stage3.3/branch2/branch2.0/Conv" [label="[]", style=solid]; +"275 /stage3/stage3.3/Slice_1" -> "278 /stage3/stage3.3/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; "276 QuantizeLinear_onnx^^Conv_1217_1" -> "277 DequantizeLinear_onnx^^Conv_1217_1" [label="[116, 116, 1, 1]", style=dashed]; "277 DequantizeLinear_onnx^^Conv_1217_1" -> "278 /stage3/stage3.3/branch2/branch2.0/Conv" [label="[116, 116, 1, 1]", style=solid]; -"278 /stage3/stage3.3/branch2/branch2.0/Conv" -> "279 /stage3/stage3.3/branch2/branch2.2/Relu" [label="[]", style=solid]; -"279 /stage3/stage3.3/branch2/branch2.2/Relu" -> "280 QuantizeLinear_/stage3/stage3.3/branch2/branch2.2/Relu_output_0_1" [label="[]", style=solid]; -"280 QuantizeLinear_/stage3/stage3.3/branch2/branch2.2/Relu_output_0_1" -> "281 DequantizeLinear_/stage3/stage3.3/branch2/branch2.2/Relu_output_0_1" [label="[]", style=dashed]; -"281 DequantizeLinear_/stage3/stage3.3/branch2/branch2.2/Relu_output_0_1" -> "284 /stage3/stage3.3/branch2/branch2.3/Conv" [label="[]", style=solid]; +"278 /stage3/stage3.3/branch2/branch2.0/Conv" -> "279 /stage3/stage3.3/branch2/branch2.2/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"279 /stage3/stage3.3/branch2/branch2.2/Relu" -> "280 QuantizeLinear_/stage3/stage3.3/branch2/branch2.2/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"280 QuantizeLinear_/stage3/stage3.3/branch2/branch2.2/Relu_output_0_1" -> "281 DequantizeLinear_/stage3/stage3.3/branch2/branch2.2/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"281 DequantizeLinear_/stage3/stage3.3/branch2/branch2.2/Relu_output_0_1" -> "284 /stage3/stage3.3/branch2/branch2.3/Conv" [label="[-1, 116, -1, -1]", style=solid]; "282 QuantizeLinear_onnx^^Conv_1220_1" -> "283 DequantizeLinear_onnx^^Conv_1220_1" [label="[116, 1, 3, 3]", style=dashed]; "283 DequantizeLinear_onnx^^Conv_1220_1" -> "284 /stage3/stage3.3/branch2/branch2.3/Conv" [label="[116, 1, 3, 3]", style=solid]; -"284 /stage3/stage3.3/branch2/branch2.3/Conv" -> "285 
QuantizeLinear_/stage3/stage3.3/branch2/branch2.3/Conv_output_0_1" [label="[]", style=solid]; -"285 QuantizeLinear_/stage3/stage3.3/branch2/branch2.3/Conv_output_0_1" -> "286 DequantizeLinear_/stage3/stage3.3/branch2/branch2.3/Conv_output_0_1" [label="[]", style=dashed]; -"286 DequantizeLinear_/stage3/stage3.3/branch2/branch2.3/Conv_output_0_1" -> "289 /stage3/stage3.3/branch2/branch2.5/Conv" [label="[]", style=solid]; +"284 /stage3/stage3.3/branch2/branch2.3/Conv" -> "285 QuantizeLinear_/stage3/stage3.3/branch2/branch2.3/Conv_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"285 QuantizeLinear_/stage3/stage3.3/branch2/branch2.3/Conv_output_0_1" -> "286 DequantizeLinear_/stage3/stage3.3/branch2/branch2.3/Conv_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"286 DequantizeLinear_/stage3/stage3.3/branch2/branch2.3/Conv_output_0_1" -> "289 /stage3/stage3.3/branch2/branch2.5/Conv" [label="[-1, 116, -1, -1]", style=solid]; "287 QuantizeLinear_onnx^^Conv_1223_1" -> "288 DequantizeLinear_onnx^^Conv_1223_1" [label="[116, 116, 1, 1]", style=dashed]; "288 DequantizeLinear_onnx^^Conv_1223_1" -> "289 /stage3/stage3.3/branch2/branch2.5/Conv" [label="[116, 116, 1, 1]", style=solid]; -"289 /stage3/stage3.3/branch2/branch2.5/Conv" -> "290 /stage3/stage3.3/branch2/branch2.7/Relu" [label="[]", style=solid]; -"290 /stage3/stage3.3/branch2/branch2.7/Relu" -> "291 QuantizeLinear_/stage3/stage3.3/branch2/branch2.7/Relu_output_0_1" [label="[]", style=solid]; -"291 QuantizeLinear_/stage3/stage3.3/branch2/branch2.7/Relu_output_0_1" -> "292 DequantizeLinear_/stage3/stage3.3/branch2/branch2.7/Relu_output_0_1" [label="[]", style=dashed]; -"292 DequantizeLinear_/stage3/stage3.3/branch2/branch2.7/Relu_output_0_1" -> "293 /stage3/stage3.3/Concat" [label="[]", style=solid]; -"293 /stage3/stage3.3/Concat" -> "295 /stage3/stage3.3/Reshape" [label="[]", style=solid]; +"289 /stage3/stage3.3/branch2/branch2.5/Conv" -> "290 /stage3/stage3.3/branch2/branch2.7/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"290 /stage3/stage3.3/branch2/branch2.7/Relu" -> "291 QuantizeLinear_/stage3/stage3.3/branch2/branch2.7/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"291 QuantizeLinear_/stage3/stage3.3/branch2/branch2.7/Relu_output_0_1" -> "292 DequantizeLinear_/stage3/stage3.3/branch2/branch2.7/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"292 DequantizeLinear_/stage3/stage3.3/branch2/branch2.7/Relu_output_0_1" -> "293 /stage3/stage3.3/Concat" [label="[-1, 116, -1, -1]", style=solid]; +"293 /stage3/stage3.3/Concat" -> "295 /stage3/stage3.3/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "294 /stage3/stage3.3/Constant_6" -> "295 /stage3/stage3.3/Reshape" [label="[5]", style=dashed]; "295 /stage3/stage3.3/Reshape" -> "296 /stage3/stage3.3/Transpose" [label="[1, 2, 116, 14, 14]", style=solid]; "296 /stage3/stage3.3/Transpose" -> "298 /stage3/stage3.3/Reshape_1" [label="[1, 116, 2, 14, 14]", style=solid]; @@ -958,28 +958,28 @@ strict digraph { "307 /stage3/stage3.4/Constant_4" -> "308 /stage3/stage3.4/Mul" [label="[1]", style=dashed]; "308 /stage3/stage3.4/Mul" -> "309 /stage3/stage3.4/Slice" [label="[1]", style=dashed]; "308 /stage3/stage3.4/Mul" -> "312 /stage3/stage3.4/Slice_1" [label="[1]", style=dashed]; -"309 /stage3/stage3.4/Slice" -> "330 /stage3/stage3.4/Concat" [label="[]", style=solid]; +"309 /stage3/stage3.4/Slice" -> "330 /stage3/stage3.4/Concat" [label="[-1, -1, -1, -1]", style=solid]; "310 /stage3/stage3.4/Constant_5" -> "311 /stage3/stage3.4/Mul_1" [label="[1]", style=dashed]; "311 
/stage3/stage3.4/Mul_1" -> "312 /stage3/stage3.4/Slice_1" [label="[1]", style=dashed]; -"312 /stage3/stage3.4/Slice_1" -> "315 /stage3/stage3.4/branch2/branch2.0/Conv" [label="[]", style=solid]; +"312 /stage3/stage3.4/Slice_1" -> "315 /stage3/stage3.4/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; "313 QuantizeLinear_onnx^^Conv_1226_1" -> "314 DequantizeLinear_onnx^^Conv_1226_1" [label="[116, 116, 1, 1]", style=dashed]; "314 DequantizeLinear_onnx^^Conv_1226_1" -> "315 /stage3/stage3.4/branch2/branch2.0/Conv" [label="[116, 116, 1, 1]", style=solid]; -"315 /stage3/stage3.4/branch2/branch2.0/Conv" -> "316 /stage3/stage3.4/branch2/branch2.2/Relu" [label="[]", style=solid]; -"316 /stage3/stage3.4/branch2/branch2.2/Relu" -> "317 QuantizeLinear_/stage3/stage3.4/branch2/branch2.2/Relu_output_0_1" [label="[]", style=solid]; -"317 QuantizeLinear_/stage3/stage3.4/branch2/branch2.2/Relu_output_0_1" -> "318 DequantizeLinear_/stage3/stage3.4/branch2/branch2.2/Relu_output_0_1" [label="[]", style=dashed]; -"318 DequantizeLinear_/stage3/stage3.4/branch2/branch2.2/Relu_output_0_1" -> "321 /stage3/stage3.4/branch2/branch2.3/Conv" [label="[]", style=solid]; +"315 /stage3/stage3.4/branch2/branch2.0/Conv" -> "316 /stage3/stage3.4/branch2/branch2.2/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"316 /stage3/stage3.4/branch2/branch2.2/Relu" -> "317 QuantizeLinear_/stage3/stage3.4/branch2/branch2.2/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"317 QuantizeLinear_/stage3/stage3.4/branch2/branch2.2/Relu_output_0_1" -> "318 DequantizeLinear_/stage3/stage3.4/branch2/branch2.2/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"318 DequantizeLinear_/stage3/stage3.4/branch2/branch2.2/Relu_output_0_1" -> "321 /stage3/stage3.4/branch2/branch2.3/Conv" [label="[-1, 116, -1, -1]", style=solid]; "319 QuantizeLinear_onnx^^Conv_1229_1" -> "320 DequantizeLinear_onnx^^Conv_1229_1" [label="[116, 1, 3, 3]", style=dashed]; "320 DequantizeLinear_onnx^^Conv_1229_1" -> "321 /stage3/stage3.4/branch2/branch2.3/Conv" [label="[116, 1, 3, 3]", style=solid]; -"321 /stage3/stage3.4/branch2/branch2.3/Conv" -> "322 QuantizeLinear_/stage3/stage3.4/branch2/branch2.3/Conv_output_0_1" [label="[]", style=solid]; -"322 QuantizeLinear_/stage3/stage3.4/branch2/branch2.3/Conv_output_0_1" -> "323 DequantizeLinear_/stage3/stage3.4/branch2/branch2.3/Conv_output_0_1" [label="[]", style=dashed]; -"323 DequantizeLinear_/stage3/stage3.4/branch2/branch2.3/Conv_output_0_1" -> "326 /stage3/stage3.4/branch2/branch2.5/Conv" [label="[]", style=solid]; +"321 /stage3/stage3.4/branch2/branch2.3/Conv" -> "322 QuantizeLinear_/stage3/stage3.4/branch2/branch2.3/Conv_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"322 QuantizeLinear_/stage3/stage3.4/branch2/branch2.3/Conv_output_0_1" -> "323 DequantizeLinear_/stage3/stage3.4/branch2/branch2.3/Conv_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"323 DequantizeLinear_/stage3/stage3.4/branch2/branch2.3/Conv_output_0_1" -> "326 /stage3/stage3.4/branch2/branch2.5/Conv" [label="[-1, 116, -1, -1]", style=solid]; "324 QuantizeLinear_onnx^^Conv_1232_1" -> "325 DequantizeLinear_onnx^^Conv_1232_1" [label="[116, 116, 1, 1]", style=dashed]; "325 DequantizeLinear_onnx^^Conv_1232_1" -> "326 /stage3/stage3.4/branch2/branch2.5/Conv" [label="[116, 116, 1, 1]", style=solid]; -"326 /stage3/stage3.4/branch2/branch2.5/Conv" -> "327 /stage3/stage3.4/branch2/branch2.7/Relu" [label="[]", style=solid]; -"327 /stage3/stage3.4/branch2/branch2.7/Relu" -> "328 
QuantizeLinear_/stage3/stage3.4/branch2/branch2.7/Relu_output_0_1" [label="[]", style=solid]; -"328 QuantizeLinear_/stage3/stage3.4/branch2/branch2.7/Relu_output_0_1" -> "329 DequantizeLinear_/stage3/stage3.4/branch2/branch2.7/Relu_output_0_1" [label="[]", style=dashed]; -"329 DequantizeLinear_/stage3/stage3.4/branch2/branch2.7/Relu_output_0_1" -> "330 /stage3/stage3.4/Concat" [label="[]", style=solid]; -"330 /stage3/stage3.4/Concat" -> "332 /stage3/stage3.4/Reshape" [label="[]", style=solid]; +"326 /stage3/stage3.4/branch2/branch2.5/Conv" -> "327 /stage3/stage3.4/branch2/branch2.7/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"327 /stage3/stage3.4/branch2/branch2.7/Relu" -> "328 QuantizeLinear_/stage3/stage3.4/branch2/branch2.7/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"328 QuantizeLinear_/stage3/stage3.4/branch2/branch2.7/Relu_output_0_1" -> "329 DequantizeLinear_/stage3/stage3.4/branch2/branch2.7/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"329 DequantizeLinear_/stage3/stage3.4/branch2/branch2.7/Relu_output_0_1" -> "330 /stage3/stage3.4/Concat" [label="[-1, 116, -1, -1]", style=solid]; +"330 /stage3/stage3.4/Concat" -> "332 /stage3/stage3.4/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "331 /stage3/stage3.4/Constant_6" -> "332 /stage3/stage3.4/Reshape" [label="[5]", style=dashed]; "332 /stage3/stage3.4/Reshape" -> "333 /stage3/stage3.4/Transpose" [label="[1, 2, 116, 14, 14]", style=solid]; "333 /stage3/stage3.4/Transpose" -> "335 /stage3/stage3.4/Reshape_1" [label="[1, 116, 2, 14, 14]", style=solid]; @@ -1001,28 +1001,28 @@ strict digraph { "344 /stage3/stage3.5/Constant_4" -> "345 /stage3/stage3.5/Mul" [label="[1]", style=dashed]; "345 /stage3/stage3.5/Mul" -> "346 /stage3/stage3.5/Slice" [label="[1]", style=dashed]; "345 /stage3/stage3.5/Mul" -> "349 /stage3/stage3.5/Slice_1" [label="[1]", style=dashed]; -"346 /stage3/stage3.5/Slice" -> "367 /stage3/stage3.5/Concat" [label="[]", style=solid]; +"346 /stage3/stage3.5/Slice" -> "367 /stage3/stage3.5/Concat" [label="[-1, -1, -1, -1]", style=solid]; "347 /stage3/stage3.5/Constant_5" -> "348 /stage3/stage3.5/Mul_1" [label="[1]", style=dashed]; "348 /stage3/stage3.5/Mul_1" -> "349 /stage3/stage3.5/Slice_1" [label="[1]", style=dashed]; -"349 /stage3/stage3.5/Slice_1" -> "352 /stage3/stage3.5/branch2/branch2.0/Conv" [label="[]", style=solid]; +"349 /stage3/stage3.5/Slice_1" -> "352 /stage3/stage3.5/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; "350 QuantizeLinear_onnx^^Conv_1235_1" -> "351 DequantizeLinear_onnx^^Conv_1235_1" [label="[116, 116, 1, 1]", style=dashed]; "351 DequantizeLinear_onnx^^Conv_1235_1" -> "352 /stage3/stage3.5/branch2/branch2.0/Conv" [label="[116, 116, 1, 1]", style=solid]; -"352 /stage3/stage3.5/branch2/branch2.0/Conv" -> "353 /stage3/stage3.5/branch2/branch2.2/Relu" [label="[]", style=solid]; -"353 /stage3/stage3.5/branch2/branch2.2/Relu" -> "354 QuantizeLinear_/stage3/stage3.5/branch2/branch2.2/Relu_output_0_1" [label="[]", style=solid]; -"354 QuantizeLinear_/stage3/stage3.5/branch2/branch2.2/Relu_output_0_1" -> "355 DequantizeLinear_/stage3/stage3.5/branch2/branch2.2/Relu_output_0_1" [label="[]", style=dashed]; -"355 DequantizeLinear_/stage3/stage3.5/branch2/branch2.2/Relu_output_0_1" -> "358 /stage3/stage3.5/branch2/branch2.3/Conv" [label="[]", style=solid]; +"352 /stage3/stage3.5/branch2/branch2.0/Conv" -> "353 /stage3/stage3.5/branch2/branch2.2/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"353 /stage3/stage3.5/branch2/branch2.2/Relu" -> "354 
QuantizeLinear_/stage3/stage3.5/branch2/branch2.2/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"354 QuantizeLinear_/stage3/stage3.5/branch2/branch2.2/Relu_output_0_1" -> "355 DequantizeLinear_/stage3/stage3.5/branch2/branch2.2/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"355 DequantizeLinear_/stage3/stage3.5/branch2/branch2.2/Relu_output_0_1" -> "358 /stage3/stage3.5/branch2/branch2.3/Conv" [label="[-1, 116, -1, -1]", style=solid]; "356 QuantizeLinear_onnx^^Conv_1238_1" -> "357 DequantizeLinear_onnx^^Conv_1238_1" [label="[116, 1, 3, 3]", style=dashed]; "357 DequantizeLinear_onnx^^Conv_1238_1" -> "358 /stage3/stage3.5/branch2/branch2.3/Conv" [label="[116, 1, 3, 3]", style=solid]; -"358 /stage3/stage3.5/branch2/branch2.3/Conv" -> "359 QuantizeLinear_/stage3/stage3.5/branch2/branch2.3/Conv_output_0_1" [label="[]", style=solid]; -"359 QuantizeLinear_/stage3/stage3.5/branch2/branch2.3/Conv_output_0_1" -> "360 DequantizeLinear_/stage3/stage3.5/branch2/branch2.3/Conv_output_0_1" [label="[]", style=dashed]; -"360 DequantizeLinear_/stage3/stage3.5/branch2/branch2.3/Conv_output_0_1" -> "363 /stage3/stage3.5/branch2/branch2.5/Conv" [label="[]", style=solid]; +"358 /stage3/stage3.5/branch2/branch2.3/Conv" -> "359 QuantizeLinear_/stage3/stage3.5/branch2/branch2.3/Conv_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"359 QuantizeLinear_/stage3/stage3.5/branch2/branch2.3/Conv_output_0_1" -> "360 DequantizeLinear_/stage3/stage3.5/branch2/branch2.3/Conv_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"360 DequantizeLinear_/stage3/stage3.5/branch2/branch2.3/Conv_output_0_1" -> "363 /stage3/stage3.5/branch2/branch2.5/Conv" [label="[-1, 116, -1, -1]", style=solid]; "361 QuantizeLinear_onnx^^Conv_1241_1" -> "362 DequantizeLinear_onnx^^Conv_1241_1" [label="[116, 116, 1, 1]", style=dashed]; "362 DequantizeLinear_onnx^^Conv_1241_1" -> "363 /stage3/stage3.5/branch2/branch2.5/Conv" [label="[116, 116, 1, 1]", style=solid]; -"363 /stage3/stage3.5/branch2/branch2.5/Conv" -> "364 /stage3/stage3.5/branch2/branch2.7/Relu" [label="[]", style=solid]; -"364 /stage3/stage3.5/branch2/branch2.7/Relu" -> "365 QuantizeLinear_/stage3/stage3.5/branch2/branch2.7/Relu_output_0_1" [label="[]", style=solid]; -"365 QuantizeLinear_/stage3/stage3.5/branch2/branch2.7/Relu_output_0_1" -> "366 DequantizeLinear_/stage3/stage3.5/branch2/branch2.7/Relu_output_0_1" [label="[]", style=dashed]; -"366 DequantizeLinear_/stage3/stage3.5/branch2/branch2.7/Relu_output_0_1" -> "367 /stage3/stage3.5/Concat" [label="[]", style=solid]; -"367 /stage3/stage3.5/Concat" -> "369 /stage3/stage3.5/Reshape" [label="[]", style=solid]; +"363 /stage3/stage3.5/branch2/branch2.5/Conv" -> "364 /stage3/stage3.5/branch2/branch2.7/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"364 /stage3/stage3.5/branch2/branch2.7/Relu" -> "365 QuantizeLinear_/stage3/stage3.5/branch2/branch2.7/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"365 QuantizeLinear_/stage3/stage3.5/branch2/branch2.7/Relu_output_0_1" -> "366 DequantizeLinear_/stage3/stage3.5/branch2/branch2.7/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"366 DequantizeLinear_/stage3/stage3.5/branch2/branch2.7/Relu_output_0_1" -> "367 /stage3/stage3.5/Concat" [label="[-1, 116, -1, -1]", style=solid]; +"367 /stage3/stage3.5/Concat" -> "369 /stage3/stage3.5/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "368 /stage3/stage3.5/Constant_6" -> "369 /stage3/stage3.5/Reshape" [label="[5]", style=dashed]; "369 /stage3/stage3.5/Reshape" -> "370 
/stage3/stage3.5/Transpose" [label="[1, 2, 116, 14, 14]", style=solid]; "370 /stage3/stage3.5/Transpose" -> "372 /stage3/stage3.5/Reshape_1" [label="[1, 116, 2, 14, 14]", style=solid]; @@ -1044,28 +1044,28 @@ strict digraph { "381 /stage3/stage3.6/Constant_4" -> "382 /stage3/stage3.6/Mul" [label="[1]", style=dashed]; "382 /stage3/stage3.6/Mul" -> "383 /stage3/stage3.6/Slice" [label="[1]", style=dashed]; "382 /stage3/stage3.6/Mul" -> "386 /stage3/stage3.6/Slice_1" [label="[1]", style=dashed]; -"383 /stage3/stage3.6/Slice" -> "404 /stage3/stage3.6/Concat" [label="[]", style=solid]; +"383 /stage3/stage3.6/Slice" -> "404 /stage3/stage3.6/Concat" [label="[-1, -1, -1, -1]", style=solid]; "384 /stage3/stage3.6/Constant_5" -> "385 /stage3/stage3.6/Mul_1" [label="[1]", style=dashed]; "385 /stage3/stage3.6/Mul_1" -> "386 /stage3/stage3.6/Slice_1" [label="[1]", style=dashed]; -"386 /stage3/stage3.6/Slice_1" -> "389 /stage3/stage3.6/branch2/branch2.0/Conv" [label="[]", style=solid]; +"386 /stage3/stage3.6/Slice_1" -> "389 /stage3/stage3.6/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; "387 QuantizeLinear_onnx^^Conv_1244_1" -> "388 DequantizeLinear_onnx^^Conv_1244_1" [label="[116, 116, 1, 1]", style=dashed]; "388 DequantizeLinear_onnx^^Conv_1244_1" -> "389 /stage3/stage3.6/branch2/branch2.0/Conv" [label="[116, 116, 1, 1]", style=solid]; -"389 /stage3/stage3.6/branch2/branch2.0/Conv" -> "390 /stage3/stage3.6/branch2/branch2.2/Relu" [label="[]", style=solid]; -"390 /stage3/stage3.6/branch2/branch2.2/Relu" -> "391 QuantizeLinear_/stage3/stage3.6/branch2/branch2.2/Relu_output_0_1" [label="[]", style=solid]; -"391 QuantizeLinear_/stage3/stage3.6/branch2/branch2.2/Relu_output_0_1" -> "392 DequantizeLinear_/stage3/stage3.6/branch2/branch2.2/Relu_output_0_1" [label="[]", style=dashed]; -"392 DequantizeLinear_/stage3/stage3.6/branch2/branch2.2/Relu_output_0_1" -> "395 /stage3/stage3.6/branch2/branch2.3/Conv" [label="[]", style=solid]; +"389 /stage3/stage3.6/branch2/branch2.0/Conv" -> "390 /stage3/stage3.6/branch2/branch2.2/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"390 /stage3/stage3.6/branch2/branch2.2/Relu" -> "391 QuantizeLinear_/stage3/stage3.6/branch2/branch2.2/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"391 QuantizeLinear_/stage3/stage3.6/branch2/branch2.2/Relu_output_0_1" -> "392 DequantizeLinear_/stage3/stage3.6/branch2/branch2.2/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"392 DequantizeLinear_/stage3/stage3.6/branch2/branch2.2/Relu_output_0_1" -> "395 /stage3/stage3.6/branch2/branch2.3/Conv" [label="[-1, 116, -1, -1]", style=solid]; "393 QuantizeLinear_onnx^^Conv_1247_1" -> "394 DequantizeLinear_onnx^^Conv_1247_1" [label="[116, 1, 3, 3]", style=dashed]; "394 DequantizeLinear_onnx^^Conv_1247_1" -> "395 /stage3/stage3.6/branch2/branch2.3/Conv" [label="[116, 1, 3, 3]", style=solid]; -"395 /stage3/stage3.6/branch2/branch2.3/Conv" -> "396 QuantizeLinear_/stage3/stage3.6/branch2/branch2.3/Conv_output_0_1" [label="[]", style=solid]; -"396 QuantizeLinear_/stage3/stage3.6/branch2/branch2.3/Conv_output_0_1" -> "397 DequantizeLinear_/stage3/stage3.6/branch2/branch2.3/Conv_output_0_1" [label="[]", style=dashed]; -"397 DequantizeLinear_/stage3/stage3.6/branch2/branch2.3/Conv_output_0_1" -> "400 /stage3/stage3.6/branch2/branch2.5/Conv" [label="[]", style=solid]; +"395 /stage3/stage3.6/branch2/branch2.3/Conv" -> "396 QuantizeLinear_/stage3/stage3.6/branch2/branch2.3/Conv_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"396 
QuantizeLinear_/stage3/stage3.6/branch2/branch2.3/Conv_output_0_1" -> "397 DequantizeLinear_/stage3/stage3.6/branch2/branch2.3/Conv_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"397 DequantizeLinear_/stage3/stage3.6/branch2/branch2.3/Conv_output_0_1" -> "400 /stage3/stage3.6/branch2/branch2.5/Conv" [label="[-1, 116, -1, -1]", style=solid]; "398 QuantizeLinear_onnx^^Conv_1250_1" -> "399 DequantizeLinear_onnx^^Conv_1250_1" [label="[116, 116, 1, 1]", style=dashed]; "399 DequantizeLinear_onnx^^Conv_1250_1" -> "400 /stage3/stage3.6/branch2/branch2.5/Conv" [label="[116, 116, 1, 1]", style=solid]; -"400 /stage3/stage3.6/branch2/branch2.5/Conv" -> "401 /stage3/stage3.6/branch2/branch2.7/Relu" [label="[]", style=solid]; -"401 /stage3/stage3.6/branch2/branch2.7/Relu" -> "402 QuantizeLinear_/stage3/stage3.6/branch2/branch2.7/Relu_output_0_1" [label="[]", style=solid]; -"402 QuantizeLinear_/stage3/stage3.6/branch2/branch2.7/Relu_output_0_1" -> "403 DequantizeLinear_/stage3/stage3.6/branch2/branch2.7/Relu_output_0_1" [label="[]", style=dashed]; -"403 DequantizeLinear_/stage3/stage3.6/branch2/branch2.7/Relu_output_0_1" -> "404 /stage3/stage3.6/Concat" [label="[]", style=solid]; -"404 /stage3/stage3.6/Concat" -> "406 /stage3/stage3.6/Reshape" [label="[]", style=solid]; +"400 /stage3/stage3.6/branch2/branch2.5/Conv" -> "401 /stage3/stage3.6/branch2/branch2.7/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"401 /stage3/stage3.6/branch2/branch2.7/Relu" -> "402 QuantizeLinear_/stage3/stage3.6/branch2/branch2.7/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"402 QuantizeLinear_/stage3/stage3.6/branch2/branch2.7/Relu_output_0_1" -> "403 DequantizeLinear_/stage3/stage3.6/branch2/branch2.7/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"403 DequantizeLinear_/stage3/stage3.6/branch2/branch2.7/Relu_output_0_1" -> "404 /stage3/stage3.6/Concat" [label="[-1, 116, -1, -1]", style=solid]; +"404 /stage3/stage3.6/Concat" -> "406 /stage3/stage3.6/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "405 /stage3/stage3.6/Constant_6" -> "406 /stage3/stage3.6/Reshape" [label="[5]", style=dashed]; "406 /stage3/stage3.6/Reshape" -> "407 /stage3/stage3.6/Transpose" [label="[1, 2, 116, 14, 14]", style=solid]; "407 /stage3/stage3.6/Transpose" -> "409 /stage3/stage3.6/Reshape_1" [label="[1, 116, 2, 14, 14]", style=solid]; @@ -1087,28 +1087,28 @@ strict digraph { "418 /stage3/stage3.7/Constant_4" -> "419 /stage3/stage3.7/Mul" [label="[1]", style=dashed]; "419 /stage3/stage3.7/Mul" -> "420 /stage3/stage3.7/Slice" [label="[1]", style=dashed]; "419 /stage3/stage3.7/Mul" -> "423 /stage3/stage3.7/Slice_1" [label="[1]", style=dashed]; -"420 /stage3/stage3.7/Slice" -> "441 /stage3/stage3.7/Concat" [label="[]", style=solid]; +"420 /stage3/stage3.7/Slice" -> "441 /stage3/stage3.7/Concat" [label="[-1, -1, -1, -1]", style=solid]; "421 /stage3/stage3.7/Constant_5" -> "422 /stage3/stage3.7/Mul_1" [label="[1]", style=dashed]; "422 /stage3/stage3.7/Mul_1" -> "423 /stage3/stage3.7/Slice_1" [label="[1]", style=dashed]; -"423 /stage3/stage3.7/Slice_1" -> "426 /stage3/stage3.7/branch2/branch2.0/Conv" [label="[]", style=solid]; +"423 /stage3/stage3.7/Slice_1" -> "426 /stage3/stage3.7/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; "424 QuantizeLinear_onnx^^Conv_1253_1" -> "425 DequantizeLinear_onnx^^Conv_1253_1" [label="[116, 116, 1, 1]", style=dashed]; "425 DequantizeLinear_onnx^^Conv_1253_1" -> "426 /stage3/stage3.7/branch2/branch2.0/Conv" [label="[116, 116, 1, 1]", style=solid]; -"426 
/stage3/stage3.7/branch2/branch2.0/Conv" -> "427 /stage3/stage3.7/branch2/branch2.2/Relu" [label="[]", style=solid]; -"427 /stage3/stage3.7/branch2/branch2.2/Relu" -> "428 QuantizeLinear_/stage3/stage3.7/branch2/branch2.2/Relu_output_0_1" [label="[]", style=solid]; -"428 QuantizeLinear_/stage3/stage3.7/branch2/branch2.2/Relu_output_0_1" -> "429 DequantizeLinear_/stage3/stage3.7/branch2/branch2.2/Relu_output_0_1" [label="[]", style=dashed]; -"429 DequantizeLinear_/stage3/stage3.7/branch2/branch2.2/Relu_output_0_1" -> "432 /stage3/stage3.7/branch2/branch2.3/Conv" [label="[]", style=solid]; +"426 /stage3/stage3.7/branch2/branch2.0/Conv" -> "427 /stage3/stage3.7/branch2/branch2.2/Relu" [label="[-1, 116, -1, -1]", style=solid]; +"427 /stage3/stage3.7/branch2/branch2.2/Relu" -> "428 QuantizeLinear_/stage3/stage3.7/branch2/branch2.2/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"428 QuantizeLinear_/stage3/stage3.7/branch2/branch2.2/Relu_output_0_1" -> "429 DequantizeLinear_/stage3/stage3.7/branch2/branch2.2/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"429 DequantizeLinear_/stage3/stage3.7/branch2/branch2.2/Relu_output_0_1" -> "432 /stage3/stage3.7/branch2/branch2.3/Conv" [label="[-1, 116, -1, -1]", style=solid]; "430 QuantizeLinear_onnx^^Conv_1256_1" -> "431 DequantizeLinear_onnx^^Conv_1256_1" [label="[116, 1, 3, 3]", style=dashed]; "431 DequantizeLinear_onnx^^Conv_1256_1" -> "432 /stage3/stage3.7/branch2/branch2.3/Conv" [label="[116, 1, 3, 3]", style=solid]; -"432 /stage3/stage3.7/branch2/branch2.3/Conv" -> "433 QuantizeLinear_/stage3/stage3.7/branch2/branch2.3/Conv_output_0_1" [label="[]", style=solid]; -"433 QuantizeLinear_/stage3/stage3.7/branch2/branch2.3/Conv_output_0_1" -> "434 DequantizeLinear_/stage3/stage3.7/branch2/branch2.3/Conv_output_0_1" [label="[]", style=dashed]; -"434 DequantizeLinear_/stage3/stage3.7/branch2/branch2.3/Conv_output_0_1" -> "437 /stage3/stage3.7/branch2/branch2.5/Conv" [label="[]", style=solid]; +"432 /stage3/stage3.7/branch2/branch2.3/Conv" -> "433 QuantizeLinear_/stage3/stage3.7/branch2/branch2.3/Conv_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"433 QuantizeLinear_/stage3/stage3.7/branch2/branch2.3/Conv_output_0_1" -> "434 DequantizeLinear_/stage3/stage3.7/branch2/branch2.3/Conv_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"434 DequantizeLinear_/stage3/stage3.7/branch2/branch2.3/Conv_output_0_1" -> "437 /stage3/stage3.7/branch2/branch2.5/Conv" [label="[-1, 116, -1, -1]", style=solid]; "435 QuantizeLinear_onnx^^Conv_1259_1" -> "436 DequantizeLinear_onnx^^Conv_1259_1" [label="[116, 116, 1, 1]", style=dashed]; "436 DequantizeLinear_onnx^^Conv_1259_1" -> "437 /stage3/stage3.7/branch2/branch2.5/Conv" [label="[116, 116, 1, 1]", style=solid]; -"437 /stage3/stage3.7/branch2/branch2.5/Conv" -> "438 /stage3/stage3.7/branch2/branch2.7/Relu" [label="[]", style=solid]; -"438 /stage3/stage3.7/branch2/branch2.7/Relu" -> "439 QuantizeLinear_/stage3/stage3.7/branch2/branch2.7/Relu_output_0_1" [label="[]", style=solid]; -"439 QuantizeLinear_/stage3/stage3.7/branch2/branch2.7/Relu_output_0_1" -> "440 DequantizeLinear_/stage3/stage3.7/branch2/branch2.7/Relu_output_0_1" [label="[]", style=dashed]; -"440 DequantizeLinear_/stage3/stage3.7/branch2/branch2.7/Relu_output_0_1" -> "441 /stage3/stage3.7/Concat" [label="[]", style=solid]; -"441 /stage3/stage3.7/Concat" -> "443 /stage3/stage3.7/Reshape" [label="[]", style=solid]; +"437 /stage3/stage3.7/branch2/branch2.5/Conv" -> "438 /stage3/stage3.7/branch2/branch2.7/Relu" [label="[-1, 
116, -1, -1]", style=solid]; +"438 /stage3/stage3.7/branch2/branch2.7/Relu" -> "439 QuantizeLinear_/stage3/stage3.7/branch2/branch2.7/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=solid]; +"439 QuantizeLinear_/stage3/stage3.7/branch2/branch2.7/Relu_output_0_1" -> "440 DequantizeLinear_/stage3/stage3.7/branch2/branch2.7/Relu_output_0_1" [label="[-1, 116, -1, -1]", style=dashed]; +"440 DequantizeLinear_/stage3/stage3.7/branch2/branch2.7/Relu_output_0_1" -> "441 /stage3/stage3.7/Concat" [label="[-1, 116, -1, -1]", style=solid]; +"441 /stage3/stage3.7/Concat" -> "443 /stage3/stage3.7/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "442 /stage3/stage3.7/Constant_6" -> "443 /stage3/stage3.7/Reshape" [label="[5]", style=dashed]; "443 /stage3/stage3.7/Reshape" -> "444 /stage3/stage3.7/Transpose" [label="[1, 2, 116, 14, 14]", style=solid]; "444 /stage3/stage3.7/Transpose" -> "446 /stage3/stage3.7/Reshape_1" [label="[1, 116, 2, 14, 14]", style=solid]; @@ -1165,28 +1165,28 @@ strict digraph { "489 /stage4/stage4.1/Constant_4" -> "490 /stage4/stage4.1/Mul" [label="[1]", style=dashed]; "490 /stage4/stage4.1/Mul" -> "491 /stage4/stage4.1/Slice" [label="[1]", style=dashed]; "490 /stage4/stage4.1/Mul" -> "494 /stage4/stage4.1/Slice_1" [label="[1]", style=dashed]; -"491 /stage4/stage4.1/Slice" -> "512 /stage4/stage4.1/Concat" [label="[]", style=solid]; +"491 /stage4/stage4.1/Slice" -> "512 /stage4/stage4.1/Concat" [label="[-1, -1, -1, -1]", style=solid]; "492 /stage4/stage4.1/Constant_5" -> "493 /stage4/stage4.1/Mul_1" [label="[1]", style=dashed]; "493 /stage4/stage4.1/Mul_1" -> "494 /stage4/stage4.1/Slice_1" [label="[1]", style=dashed]; -"494 /stage4/stage4.1/Slice_1" -> "497 /stage4/stage4.1/branch2/branch2.0/Conv" [label="[]", style=solid]; +"494 /stage4/stage4.1/Slice_1" -> "497 /stage4/stage4.1/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; "495 QuantizeLinear_onnx^^Conv_1277_1" -> "496 DequantizeLinear_onnx^^Conv_1277_1" [label="[232, 232, 1, 1]", style=dashed]; "496 DequantizeLinear_onnx^^Conv_1277_1" -> "497 /stage4/stage4.1/branch2/branch2.0/Conv" [label="[232, 232, 1, 1]", style=solid]; -"497 /stage4/stage4.1/branch2/branch2.0/Conv" -> "498 /stage4/stage4.1/branch2/branch2.2/Relu" [label="[]", style=solid]; -"498 /stage4/stage4.1/branch2/branch2.2/Relu" -> "499 QuantizeLinear_/stage4/stage4.1/branch2/branch2.2/Relu_output_0_1" [label="[]", style=solid]; -"499 QuantizeLinear_/stage4/stage4.1/branch2/branch2.2/Relu_output_0_1" -> "500 DequantizeLinear_/stage4/stage4.1/branch2/branch2.2/Relu_output_0_1" [label="[]", style=dashed]; -"500 DequantizeLinear_/stage4/stage4.1/branch2/branch2.2/Relu_output_0_1" -> "503 /stage4/stage4.1/branch2/branch2.3/Conv" [label="[]", style=solid]; +"497 /stage4/stage4.1/branch2/branch2.0/Conv" -> "498 /stage4/stage4.1/branch2/branch2.2/Relu" [label="[-1, 232, -1, -1]", style=solid]; +"498 /stage4/stage4.1/branch2/branch2.2/Relu" -> "499 QuantizeLinear_/stage4/stage4.1/branch2/branch2.2/Relu_output_0_1" [label="[-1, 232, -1, -1]", style=solid]; +"499 QuantizeLinear_/stage4/stage4.1/branch2/branch2.2/Relu_output_0_1" -> "500 DequantizeLinear_/stage4/stage4.1/branch2/branch2.2/Relu_output_0_1" [label="[-1, 232, -1, -1]", style=dashed]; +"500 DequantizeLinear_/stage4/stage4.1/branch2/branch2.2/Relu_output_0_1" -> "503 /stage4/stage4.1/branch2/branch2.3/Conv" [label="[-1, 232, -1, -1]", style=solid]; "501 QuantizeLinear_onnx^^Conv_1280_1" -> "502 DequantizeLinear_onnx^^Conv_1280_1" [label="[232, 1, 3, 3]", style=dashed]; "502 
DequantizeLinear_onnx^^Conv_1280_1" -> "503 /stage4/stage4.1/branch2/branch2.3/Conv" [label="[232, 1, 3, 3]", style=solid]; -"503 /stage4/stage4.1/branch2/branch2.3/Conv" -> "504 QuantizeLinear_/stage4/stage4.1/branch2/branch2.3/Conv_output_0_1" [label="[]", style=solid]; -"504 QuantizeLinear_/stage4/stage4.1/branch2/branch2.3/Conv_output_0_1" -> "505 DequantizeLinear_/stage4/stage4.1/branch2/branch2.3/Conv_output_0_1" [label="[]", style=dashed]; -"505 DequantizeLinear_/stage4/stage4.1/branch2/branch2.3/Conv_output_0_1" -> "508 /stage4/stage4.1/branch2/branch2.5/Conv" [label="[]", style=solid]; +"503 /stage4/stage4.1/branch2/branch2.3/Conv" -> "504 QuantizeLinear_/stage4/stage4.1/branch2/branch2.3/Conv_output_0_1" [label="[-1, 232, -1, -1]", style=solid]; +"504 QuantizeLinear_/stage4/stage4.1/branch2/branch2.3/Conv_output_0_1" -> "505 DequantizeLinear_/stage4/stage4.1/branch2/branch2.3/Conv_output_0_1" [label="[-1, 232, -1, -1]", style=dashed]; +"505 DequantizeLinear_/stage4/stage4.1/branch2/branch2.3/Conv_output_0_1" -> "508 /stage4/stage4.1/branch2/branch2.5/Conv" [label="[-1, 232, -1, -1]", style=solid]; "506 QuantizeLinear_onnx^^Conv_1283_1" -> "507 DequantizeLinear_onnx^^Conv_1283_1" [label="[232, 232, 1, 1]", style=dashed]; "507 DequantizeLinear_onnx^^Conv_1283_1" -> "508 /stage4/stage4.1/branch2/branch2.5/Conv" [label="[232, 232, 1, 1]", style=solid]; -"508 /stage4/stage4.1/branch2/branch2.5/Conv" -> "509 /stage4/stage4.1/branch2/branch2.7/Relu" [label="[]", style=solid]; -"509 /stage4/stage4.1/branch2/branch2.7/Relu" -> "510 QuantizeLinear_/stage4/stage4.1/branch2/branch2.7/Relu_output_0_1" [label="[]", style=solid]; -"510 QuantizeLinear_/stage4/stage4.1/branch2/branch2.7/Relu_output_0_1" -> "511 DequantizeLinear_/stage4/stage4.1/branch2/branch2.7/Relu_output_0_1" [label="[]", style=dashed]; -"511 DequantizeLinear_/stage4/stage4.1/branch2/branch2.7/Relu_output_0_1" -> "512 /stage4/stage4.1/Concat" [label="[]", style=solid]; -"512 /stage4/stage4.1/Concat" -> "514 /stage4/stage4.1/Reshape" [label="[]", style=solid]; +"508 /stage4/stage4.1/branch2/branch2.5/Conv" -> "509 /stage4/stage4.1/branch2/branch2.7/Relu" [label="[-1, 232, -1, -1]", style=solid]; +"509 /stage4/stage4.1/branch2/branch2.7/Relu" -> "510 QuantizeLinear_/stage4/stage4.1/branch2/branch2.7/Relu_output_0_1" [label="[-1, 232, -1, -1]", style=solid]; +"510 QuantizeLinear_/stage4/stage4.1/branch2/branch2.7/Relu_output_0_1" -> "511 DequantizeLinear_/stage4/stage4.1/branch2/branch2.7/Relu_output_0_1" [label="[-1, 232, -1, -1]", style=dashed]; +"511 DequantizeLinear_/stage4/stage4.1/branch2/branch2.7/Relu_output_0_1" -> "512 /stage4/stage4.1/Concat" [label="[-1, 232, -1, -1]", style=solid]; +"512 /stage4/stage4.1/Concat" -> "514 /stage4/stage4.1/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "513 /stage4/stage4.1/Constant_6" -> "514 /stage4/stage4.1/Reshape" [label="[5]", style=dashed]; "514 /stage4/stage4.1/Reshape" -> "515 /stage4/stage4.1/Transpose" [label="[1, 2, 232, 7, 7]", style=solid]; "515 /stage4/stage4.1/Transpose" -> "517 /stage4/stage4.1/Reshape_1" [label="[1, 232, 2, 7, 7]", style=solid]; @@ -1208,28 +1208,28 @@ strict digraph { "526 /stage4/stage4.2/Constant_4" -> "527 /stage4/stage4.2/Mul" [label="[1]", style=dashed]; "527 /stage4/stage4.2/Mul" -> "528 /stage4/stage4.2/Slice" [label="[1]", style=dashed]; "527 /stage4/stage4.2/Mul" -> "531 /stage4/stage4.2/Slice_1" [label="[1]", style=dashed]; -"528 /stage4/stage4.2/Slice" -> "549 /stage4/stage4.2/Concat" [label="[]", style=solid]; +"528 
/stage4/stage4.2/Slice" -> "549 /stage4/stage4.2/Concat" [label="[-1, -1, -1, -1]", style=solid]; "529 /stage4/stage4.2/Constant_5" -> "530 /stage4/stage4.2/Mul_1" [label="[1]", style=dashed]; "530 /stage4/stage4.2/Mul_1" -> "531 /stage4/stage4.2/Slice_1" [label="[1]", style=dashed]; -"531 /stage4/stage4.2/Slice_1" -> "534 /stage4/stage4.2/branch2/branch2.0/Conv" [label="[]", style=solid]; +"531 /stage4/stage4.2/Slice_1" -> "534 /stage4/stage4.2/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; "532 QuantizeLinear_onnx^^Conv_1286_1" -> "533 DequantizeLinear_onnx^^Conv_1286_1" [label="[232, 232, 1, 1]", style=dashed]; "533 DequantizeLinear_onnx^^Conv_1286_1" -> "534 /stage4/stage4.2/branch2/branch2.0/Conv" [label="[232, 232, 1, 1]", style=solid]; -"534 /stage4/stage4.2/branch2/branch2.0/Conv" -> "535 /stage4/stage4.2/branch2/branch2.2/Relu" [label="[]", style=solid]; -"535 /stage4/stage4.2/branch2/branch2.2/Relu" -> "536 QuantizeLinear_/stage4/stage4.2/branch2/branch2.2/Relu_output_0_1" [label="[]", style=solid]; -"536 QuantizeLinear_/stage4/stage4.2/branch2/branch2.2/Relu_output_0_1" -> "537 DequantizeLinear_/stage4/stage4.2/branch2/branch2.2/Relu_output_0_1" [label="[]", style=dashed]; -"537 DequantizeLinear_/stage4/stage4.2/branch2/branch2.2/Relu_output_0_1" -> "540 /stage4/stage4.2/branch2/branch2.3/Conv" [label="[]", style=solid]; +"534 /stage4/stage4.2/branch2/branch2.0/Conv" -> "535 /stage4/stage4.2/branch2/branch2.2/Relu" [label="[-1, 232, -1, -1]", style=solid]; +"535 /stage4/stage4.2/branch2/branch2.2/Relu" -> "536 QuantizeLinear_/stage4/stage4.2/branch2/branch2.2/Relu_output_0_1" [label="[-1, 232, -1, -1]", style=solid]; +"536 QuantizeLinear_/stage4/stage4.2/branch2/branch2.2/Relu_output_0_1" -> "537 DequantizeLinear_/stage4/stage4.2/branch2/branch2.2/Relu_output_0_1" [label="[-1, 232, -1, -1]", style=dashed]; +"537 DequantizeLinear_/stage4/stage4.2/branch2/branch2.2/Relu_output_0_1" -> "540 /stage4/stage4.2/branch2/branch2.3/Conv" [label="[-1, 232, -1, -1]", style=solid]; "538 QuantizeLinear_onnx^^Conv_1289_1" -> "539 DequantizeLinear_onnx^^Conv_1289_1" [label="[232, 1, 3, 3]", style=dashed]; "539 DequantizeLinear_onnx^^Conv_1289_1" -> "540 /stage4/stage4.2/branch2/branch2.3/Conv" [label="[232, 1, 3, 3]", style=solid]; -"540 /stage4/stage4.2/branch2/branch2.3/Conv" -> "541 QuantizeLinear_/stage4/stage4.2/branch2/branch2.3/Conv_output_0_1" [label="[]", style=solid]; -"541 QuantizeLinear_/stage4/stage4.2/branch2/branch2.3/Conv_output_0_1" -> "542 DequantizeLinear_/stage4/stage4.2/branch2/branch2.3/Conv_output_0_1" [label="[]", style=dashed]; -"542 DequantizeLinear_/stage4/stage4.2/branch2/branch2.3/Conv_output_0_1" -> "545 /stage4/stage4.2/branch2/branch2.5/Conv" [label="[]", style=solid]; +"540 /stage4/stage4.2/branch2/branch2.3/Conv" -> "541 QuantizeLinear_/stage4/stage4.2/branch2/branch2.3/Conv_output_0_1" [label="[-1, 232, -1, -1]", style=solid]; +"541 QuantizeLinear_/stage4/stage4.2/branch2/branch2.3/Conv_output_0_1" -> "542 DequantizeLinear_/stage4/stage4.2/branch2/branch2.3/Conv_output_0_1" [label="[-1, 232, -1, -1]", style=dashed]; +"542 DequantizeLinear_/stage4/stage4.2/branch2/branch2.3/Conv_output_0_1" -> "545 /stage4/stage4.2/branch2/branch2.5/Conv" [label="[-1, 232, -1, -1]", style=solid]; "543 QuantizeLinear_onnx^^Conv_1292_1" -> "544 DequantizeLinear_onnx^^Conv_1292_1" [label="[232, 232, 1, 1]", style=dashed]; "544 DequantizeLinear_onnx^^Conv_1292_1" -> "545 /stage4/stage4.2/branch2/branch2.5/Conv" [label="[232, 232, 1, 1]", style=solid]; -"545 
/stage4/stage4.2/branch2/branch2.5/Conv" -> "546 /stage4/stage4.2/branch2/branch2.7/Relu" [label="[]", style=solid]; -"546 /stage4/stage4.2/branch2/branch2.7/Relu" -> "547 QuantizeLinear_/stage4/stage4.2/branch2/branch2.7/Relu_output_0_1" [label="[]", style=solid]; -"547 QuantizeLinear_/stage4/stage4.2/branch2/branch2.7/Relu_output_0_1" -> "548 DequantizeLinear_/stage4/stage4.2/branch2/branch2.7/Relu_output_0_1" [label="[]", style=dashed]; -"548 DequantizeLinear_/stage4/stage4.2/branch2/branch2.7/Relu_output_0_1" -> "549 /stage4/stage4.2/Concat" [label="[]", style=solid]; -"549 /stage4/stage4.2/Concat" -> "551 /stage4/stage4.2/Reshape" [label="[]", style=solid]; +"545 /stage4/stage4.2/branch2/branch2.5/Conv" -> "546 /stage4/stage4.2/branch2/branch2.7/Relu" [label="[-1, 232, -1, -1]", style=solid]; +"546 /stage4/stage4.2/branch2/branch2.7/Relu" -> "547 QuantizeLinear_/stage4/stage4.2/branch2/branch2.7/Relu_output_0_1" [label="[-1, 232, -1, -1]", style=solid]; +"547 QuantizeLinear_/stage4/stage4.2/branch2/branch2.7/Relu_output_0_1" -> "548 DequantizeLinear_/stage4/stage4.2/branch2/branch2.7/Relu_output_0_1" [label="[-1, 232, -1, -1]", style=dashed]; +"548 DequantizeLinear_/stage4/stage4.2/branch2/branch2.7/Relu_output_0_1" -> "549 /stage4/stage4.2/Concat" [label="[-1, 232, -1, -1]", style=solid]; +"549 /stage4/stage4.2/Concat" -> "551 /stage4/stage4.2/Reshape" [label="[-1, -1, -1, -1]", style=solid]; "550 /stage4/stage4.2/Constant_6" -> "551 /stage4/stage4.2/Reshape" [label="[5]", style=dashed]; "551 /stage4/stage4.2/Reshape" -> "552 /stage4/stage4.2/Transpose" [label="[1, 2, 232, 7, 7]", style=solid]; "552 /stage4/stage4.2/Transpose" -> "554 /stage4/stage4.2/Reshape_1" [label="[1, 232, 2, 7, 7]", style=solid]; @@ -1251,28 +1251,28 @@ strict digraph { "563 /stage4/stage4.3/Constant_4" -> "564 /stage4/stage4.3/Mul" [label="[1]", style=dashed]; "564 /stage4/stage4.3/Mul" -> "565 /stage4/stage4.3/Slice" [label="[1]", style=dashed]; "564 /stage4/stage4.3/Mul" -> "568 /stage4/stage4.3/Slice_1" [label="[1]", style=dashed]; -"565 /stage4/stage4.3/Slice" -> "586 /stage4/stage4.3/Concat" [label="[]", style=solid]; +"565 /stage4/stage4.3/Slice" -> "586 /stage4/stage4.3/Concat" [label="[-1, -1, -1, -1]", style=solid]; "566 /stage4/stage4.3/Constant_5" -> "567 /stage4/stage4.3/Mul_1" [label="[1]", style=dashed]; "567 /stage4/stage4.3/Mul_1" -> "568 /stage4/stage4.3/Slice_1" [label="[1]", style=dashed]; -"568 /stage4/stage4.3/Slice_1" -> "571 /stage4/stage4.3/branch2/branch2.0/Conv" [label="[]", style=solid]; +"568 /stage4/stage4.3/Slice_1" -> "571 /stage4/stage4.3/branch2/branch2.0/Conv" [label="[-1, -1, -1, -1]", style=solid]; "569 QuantizeLinear_onnx^^Conv_1295_1" -> "570 DequantizeLinear_onnx^^Conv_1295_1" [label="[232, 232, 1, 1]", style=dashed]; "570 DequantizeLinear_onnx^^Conv_1295_1" -> "571 /stage4/stage4.3/branch2/branch2.0/Conv" [label="[232, 232, 1, 1]", style=solid]; -"571 /stage4/stage4.3/branch2/branch2.0/Conv" -> "572 /stage4/stage4.3/branch2/branch2.2/Relu" [label="[]", style=solid]; -"572 /stage4/stage4.3/branch2/branch2.2/Relu" -> "573 QuantizeLinear_/stage4/stage4.3/branch2/branch2.2/Relu_output_0_1" [label="[]", style=solid]; -"573 QuantizeLinear_/stage4/stage4.3/branch2/branch2.2/Relu_output_0_1" -> "574 DequantizeLinear_/stage4/stage4.3/branch2/branch2.2/Relu_output_0_1" [label="[]", style=dashed]; -"574 DequantizeLinear_/stage4/stage4.3/branch2/branch2.2/Relu_output_0_1" -> "577 /stage4/stage4.3/branch2/branch2.3/Conv" [label="[]", style=solid]; +"571 
/stage4/stage4.3/branch2/branch2.0/Conv" -> "572 /stage4/stage4.3/branch2/branch2.2/Relu" [label="[-1, 232, -1, -1]", style=solid]; +"572 /stage4/stage4.3/branch2/branch2.2/Relu" -> "573 QuantizeLinear_/stage4/stage4.3/branch2/branch2.2/Relu_output_0_1" [label="[-1, 232, -1, -1]", style=solid]; +"573 QuantizeLinear_/stage4/stage4.3/branch2/branch2.2/Relu_output_0_1" -> "574 DequantizeLinear_/stage4/stage4.3/branch2/branch2.2/Relu_output_0_1" [label="[-1, 232, -1, -1]", style=dashed]; +"574 DequantizeLinear_/stage4/stage4.3/branch2/branch2.2/Relu_output_0_1" -> "577 /stage4/stage4.3/branch2/branch2.3/Conv" [label="[-1, 232, -1, -1]", style=solid]; "575 QuantizeLinear_onnx^^Conv_1298_1" -> "576 DequantizeLinear_onnx^^Conv_1298_1" [label="[232, 1, 3, 3]", style=dashed]; "576 DequantizeLinear_onnx^^Conv_1298_1" -> "577 /stage4/stage4.3/branch2/branch2.3/Conv" [label="[232, 1, 3, 3]", style=solid]; -"577 /stage4/stage4.3/branch2/branch2.3/Conv" -> "578 QuantizeLinear_/stage4/stage4.3/branch2/branch2.3/Conv_output_0_1" [label="[]", style=solid]; -"578 QuantizeLinear_/stage4/stage4.3/branch2/branch2.3/Conv_output_0_1" -> "579 DequantizeLinear_/stage4/stage4.3/branch2/branch2.3/Conv_output_0_1" [label="[]", style=dashed]; -"579 DequantizeLinear_/stage4/stage4.3/branch2/branch2.3/Conv_output_0_1" -> "582 /stage4/stage4.3/branch2/branch2.5/Conv" [label="[]", style=solid]; +"577 /stage4/stage4.3/branch2/branch2.3/Conv" -> "578 QuantizeLinear_/stage4/stage4.3/branch2/branch2.3/Conv_output_0_1" [label="[-1, 232, -1, -1]", style=solid]; +"578 QuantizeLinear_/stage4/stage4.3/branch2/branch2.3/Conv_output_0_1" -> "579 DequantizeLinear_/stage4/stage4.3/branch2/branch2.3/Conv_output_0_1" [label="[-1, 232, -1, -1]", style=dashed]; +"579 DequantizeLinear_/stage4/stage4.3/branch2/branch2.3/Conv_output_0_1" -> "582 /stage4/stage4.3/branch2/branch2.5/Conv" [label="[-1, 232, -1, -1]", style=solid]; "580 QuantizeLinear_onnx^^Conv_1301_1" -> "581 DequantizeLinear_onnx^^Conv_1301_1" [label="[232, 232, 1, 1]", style=dashed]; "581 DequantizeLinear_onnx^^Conv_1301_1" -> "582 /stage4/stage4.3/branch2/branch2.5/Conv" [label="[232, 232, 1, 1]", style=solid]; -"582 /stage4/stage4.3/branch2/branch2.5/Conv" -> "583 /stage4/stage4.3/branch2/branch2.7/Relu" [label="[]", style=solid]; -"583 /stage4/stage4.3/branch2/branch2.7/Relu" -> "584 QuantizeLinear_/stage4/stage4.3/branch2/branch2.7/Relu_output_0_1" [label="[]", style=solid]; -"584 QuantizeLinear_/stage4/stage4.3/branch2/branch2.7/Relu_output_0_1" -> "585 DequantizeLinear_/stage4/stage4.3/branch2/branch2.7/Relu_output_0_1" [label="[]", style=dashed]; -"585 DequantizeLinear_/stage4/stage4.3/branch2/branch2.7/Relu_output_0_1" -> "586 /stage4/stage4.3/Concat" [label="[]", style=solid]; -"586 /stage4/stage4.3/Concat" -> "588 /stage4/stage4.3/Reshape" [label="[]", style=solid]; +"582 /stage4/stage4.3/branch2/branch2.5/Conv" -> "583 /stage4/stage4.3/branch2/branch2.7/Relu" [label="[-1, 232, -1, -1]", style=solid]; +"583 /stage4/stage4.3/branch2/branch2.7/Relu" -> "584 QuantizeLinear_/stage4/stage4.3/branch2/branch2.7/Relu_output_0_1" [label="[-1, 232, -1, -1]", style=solid]; +"584 QuantizeLinear_/stage4/stage4.3/branch2/branch2.7/Relu_output_0_1" -> "585 DequantizeLinear_/stage4/stage4.3/branch2/branch2.7/Relu_output_0_1" [label="[-1, 232, -1, -1]", style=dashed]; +"585 DequantizeLinear_/stage4/stage4.3/branch2/branch2.7/Relu_output_0_1" -> "586 /stage4/stage4.3/Concat" [label="[-1, 232, -1, -1]", style=solid]; +"586 /stage4/stage4.3/Concat" -> "588 /stage4/stage4.3/Reshape" 
[label="[-1, -1, -1, -1]", style=solid]; "587 /stage4/stage4.3/Constant_6" -> "588 /stage4/stage4.3/Reshape" [label="[5]", style=dashed]; "588 /stage4/stage4.3/Reshape" -> "589 /stage4/stage4.3/Transpose" [label="[1, 2, 232, 7, 7]", style=solid]; "589 /stage4/stage4.3/Transpose" -> "591 /stage4/stage4.3/Reshape_1" [label="[1, 232, 2, 7, 7]", style=solid]; diff --git a/tests/onnx/data/reference_graphs/quantization/squeezenet1_0.dot b/tests/onnx/data/reference_graphs/quantization/squeezenet1_0.dot index 46682fc6421..6e8087dcab5 100644 --- a/tests/onnx/data/reference_graphs/quantization/squeezenet1_0.dot +++ b/tests/onnx/data/reference_graphs/quantization/squeezenet1_0.dot @@ -172,184 +172,184 @@ strict digraph { "170 /Flatten" [id=170, type=Flatten]; "171 nncf_model_input_0" [id=171, type=nncf_model_input]; "172 nncf_model_output_0" [id=172, type=nncf_model_output]; -"0 QuantizeLinear_input.1_1" -> "1 DequantizeLinear_input.1_1" [label="(1, 3, 224, 224)", style=dashed]; -"1 DequantizeLinear_input.1_1" -> "4 /features/features.0/Conv" [label="(1, 3, 224, 224)", style=solid]; -"2 QuantizeLinear_features.0.weight_1" -> "3 DequantizeLinear_features.0.weight_1" [label="(96, 3, 7, 7)", style=dashed]; -"3 DequantizeLinear_features.0.weight_1" -> "4 /features/features.0/Conv" [label="(96, 3, 7, 7)", style=solid]; -"4 /features/features.0/Conv" -> "5 /features/features.1/Relu" [label="(1, 96, 109, 109)", style=solid]; -"5 /features/features.1/Relu" -> "6 QuantizeLinear_/features/features.1/Relu_output_0_1" [label="(1, 96, 109, 109)", style=solid]; -"6 QuantizeLinear_/features/features.1/Relu_output_0_1" -> "7 DequantizeLinear_/features/features.1/Relu_output_0_1" [label="(1, 96, 109, 109)", style=dashed]; -"7 DequantizeLinear_/features/features.1/Relu_output_0_1" -> "8 /features/features.2/MaxPool" [label="(1, 96, 109, 109)", style=solid]; -"8 /features/features.2/MaxPool" -> "11 /features/features.3/squeeze/Conv" [label="(1, 96, 54, 54)", style=solid]; -"9 QuantizeLinear_features.3.squeeze.weight_1" -> "10 DequantizeLinear_features.3.squeeze.weight_1" [label="(16, 96, 1, 1)", style=dashed]; -"10 DequantizeLinear_features.3.squeeze.weight_1" -> "11 /features/features.3/squeeze/Conv" [label="(16, 96, 1, 1)", style=solid]; -"11 /features/features.3/squeeze/Conv" -> "12 /features/features.3/squeeze_activation/Relu" [label="(1, 16, 54, 54)", style=solid]; -"12 /features/features.3/squeeze_activation/Relu" -> "13 QuantizeLinear_/features/features.3/squeeze_activation/Relu_output_0_1" [label="(1, 16, 54, 54)", style=solid]; -"13 QuantizeLinear_/features/features.3/squeeze_activation/Relu_output_0_1" -> "14 DequantizeLinear_/features/features.3/squeeze_activation/Relu_output_0_1" [label="(1, 16, 54, 54)", style=dashed]; -"14 DequantizeLinear_/features/features.3/squeeze_activation/Relu_output_0_1" -> "17 /features/features.3/expand1x1/Conv" [label="(1, 16, 54, 54)", style=solid]; -"14 DequantizeLinear_/features/features.3/squeeze_activation/Relu_output_0_1" -> "21 /features/features.3/expand3x3/Conv" [label="(1, 16, 54, 54)", style=solid]; -"15 QuantizeLinear_features.3.expand1x1.weight_1" -> "16 DequantizeLinear_features.3.expand1x1.weight_1" [label="(64, 16, 1, 1)", style=dashed]; -"16 DequantizeLinear_features.3.expand1x1.weight_1" -> "17 /features/features.3/expand1x1/Conv" [label="(64, 16, 1, 1)", style=solid]; -"17 /features/features.3/expand1x1/Conv" -> "18 /features/features.3/expand1x1_activation/Relu" [label="(1, 64, 54, 54)", style=solid]; -"18 /features/features.3/expand1x1_activation/Relu" -> 
"23 QuantizeLinear_/features/features.3/expand1x1_activation/Relu_output_0_1" [label="(1, 64, 54, 54)", style=solid]; -"19 QuantizeLinear_features.3.expand3x3.weight_1" -> "20 DequantizeLinear_features.3.expand3x3.weight_1" [label="(64, 16, 3, 3)", style=dashed]; -"20 DequantizeLinear_features.3.expand3x3.weight_1" -> "21 /features/features.3/expand3x3/Conv" [label="(64, 16, 3, 3)", style=solid]; -"21 /features/features.3/expand3x3/Conv" -> "22 /features/features.3/expand3x3_activation/Relu" [label="(1, 64, 54, 54)", style=solid]; -"22 /features/features.3/expand3x3_activation/Relu" -> "25 QuantizeLinear_/features/features.3/expand3x3_activation/Relu_output_0_1" [label="(1, 64, 54, 54)", style=solid]; -"23 QuantizeLinear_/features/features.3/expand1x1_activation/Relu_output_0_1" -> "24 DequantizeLinear_/features/features.3/expand1x1_activation/Relu_output_0_1" [label="(1, 64, 54, 54)", style=dashed]; -"24 DequantizeLinear_/features/features.3/expand1x1_activation/Relu_output_0_1" -> "27 /features/features.3/Concat" [label="(1, 64, 54, 54)", style=solid]; -"25 QuantizeLinear_/features/features.3/expand3x3_activation/Relu_output_0_1" -> "26 DequantizeLinear_/features/features.3/expand3x3_activation/Relu_output_0_1" [label="(1, 64, 54, 54)", style=dashed]; -"26 DequantizeLinear_/features/features.3/expand3x3_activation/Relu_output_0_1" -> "27 /features/features.3/Concat" [label="(1, 64, 54, 54)", style=solid]; -"27 /features/features.3/Concat" -> "30 /features/features.4/squeeze/Conv" [label="(1, 128, 54, 54)", style=solid]; -"28 QuantizeLinear_features.4.squeeze.weight_1" -> "29 DequantizeLinear_features.4.squeeze.weight_1" [label="(16, 128, 1, 1)", style=dashed]; -"29 DequantizeLinear_features.4.squeeze.weight_1" -> "30 /features/features.4/squeeze/Conv" [label="(16, 128, 1, 1)", style=solid]; -"30 /features/features.4/squeeze/Conv" -> "31 /features/features.4/squeeze_activation/Relu" [label="(1, 16, 54, 54)", style=solid]; -"31 /features/features.4/squeeze_activation/Relu" -> "32 QuantizeLinear_/features/features.4/squeeze_activation/Relu_output_0_1" [label="(1, 16, 54, 54)", style=solid]; -"32 QuantizeLinear_/features/features.4/squeeze_activation/Relu_output_0_1" -> "33 DequantizeLinear_/features/features.4/squeeze_activation/Relu_output_0_1" [label="(1, 16, 54, 54)", style=dashed]; -"33 DequantizeLinear_/features/features.4/squeeze_activation/Relu_output_0_1" -> "36 /features/features.4/expand1x1/Conv" [label="(1, 16, 54, 54)", style=solid]; -"33 DequantizeLinear_/features/features.4/squeeze_activation/Relu_output_0_1" -> "40 /features/features.4/expand3x3/Conv" [label="(1, 16, 54, 54)", style=solid]; -"34 QuantizeLinear_features.4.expand1x1.weight_1" -> "35 DequantizeLinear_features.4.expand1x1.weight_1" [label="(64, 16, 1, 1)", style=dashed]; -"35 DequantizeLinear_features.4.expand1x1.weight_1" -> "36 /features/features.4/expand1x1/Conv" [label="(64, 16, 1, 1)", style=solid]; -"36 /features/features.4/expand1x1/Conv" -> "37 /features/features.4/expand1x1_activation/Relu" [label="(1, 64, 54, 54)", style=solid]; -"37 /features/features.4/expand1x1_activation/Relu" -> "42 QuantizeLinear_/features/features.4/expand1x1_activation/Relu_output_0_1" [label="(1, 64, 54, 54)", style=solid]; -"38 QuantizeLinear_features.4.expand3x3.weight_1" -> "39 DequantizeLinear_features.4.expand3x3.weight_1" [label="(64, 16, 3, 3)", style=dashed]; -"39 DequantizeLinear_features.4.expand3x3.weight_1" -> "40 /features/features.4/expand3x3/Conv" [label="(64, 16, 3, 3)", style=solid]; -"40 
/features/features.4/expand3x3/Conv" -> "41 /features/features.4/expand3x3_activation/Relu" [label="(1, 64, 54, 54)", style=solid]; -"41 /features/features.4/expand3x3_activation/Relu" -> "44 QuantizeLinear_/features/features.4/expand3x3_activation/Relu_output_0_1" [label="(1, 64, 54, 54)", style=solid]; -"42 QuantizeLinear_/features/features.4/expand1x1_activation/Relu_output_0_1" -> "43 DequantizeLinear_/features/features.4/expand1x1_activation/Relu_output_0_1" [label="(1, 64, 54, 54)", style=dashed]; -"43 DequantizeLinear_/features/features.4/expand1x1_activation/Relu_output_0_1" -> "46 /features/features.4/Concat" [label="(1, 64, 54, 54)", style=solid]; -"44 QuantizeLinear_/features/features.4/expand3x3_activation/Relu_output_0_1" -> "45 DequantizeLinear_/features/features.4/expand3x3_activation/Relu_output_0_1" [label="(1, 64, 54, 54)", style=dashed]; -"45 DequantizeLinear_/features/features.4/expand3x3_activation/Relu_output_0_1" -> "46 /features/features.4/Concat" [label="(1, 64, 54, 54)", style=solid]; -"46 /features/features.4/Concat" -> "49 /features/features.5/squeeze/Conv" [label="(1, 128, 54, 54)", style=solid]; -"47 QuantizeLinear_features.5.squeeze.weight_1" -> "48 DequantizeLinear_features.5.squeeze.weight_1" [label="(32, 128, 1, 1)", style=dashed]; -"48 DequantizeLinear_features.5.squeeze.weight_1" -> "49 /features/features.5/squeeze/Conv" [label="(32, 128, 1, 1)", style=solid]; -"49 /features/features.5/squeeze/Conv" -> "50 /features/features.5/squeeze_activation/Relu" [label="(1, 32, 54, 54)", style=solid]; -"50 /features/features.5/squeeze_activation/Relu" -> "51 QuantizeLinear_/features/features.5/squeeze_activation/Relu_output_0_1" [label="(1, 32, 54, 54)", style=solid]; -"51 QuantizeLinear_/features/features.5/squeeze_activation/Relu_output_0_1" -> "52 DequantizeLinear_/features/features.5/squeeze_activation/Relu_output_0_1" [label="(1, 32, 54, 54)", style=dashed]; -"52 DequantizeLinear_/features/features.5/squeeze_activation/Relu_output_0_1" -> "55 /features/features.5/expand1x1/Conv" [label="(1, 32, 54, 54)", style=solid]; -"52 DequantizeLinear_/features/features.5/squeeze_activation/Relu_output_0_1" -> "59 /features/features.5/expand3x3/Conv" [label="(1, 32, 54, 54)", style=solid]; -"53 QuantizeLinear_features.5.expand1x1.weight_1" -> "54 DequantizeLinear_features.5.expand1x1.weight_1" [label="(128, 32, 1, 1)", style=dashed]; -"54 DequantizeLinear_features.5.expand1x1.weight_1" -> "55 /features/features.5/expand1x1/Conv" [label="(128, 32, 1, 1)", style=solid]; -"55 /features/features.5/expand1x1/Conv" -> "56 /features/features.5/expand1x1_activation/Relu" [label="(1, 128, 54, 54)", style=solid]; -"56 /features/features.5/expand1x1_activation/Relu" -> "63 QuantizeLinear_/features/features.5/expand1x1_activation/Relu_output_0_1" [label="(1, 128, 54, 54)", style=solid]; -"57 QuantizeLinear_features.5.expand3x3.weight_1" -> "58 DequantizeLinear_features.5.expand3x3.weight_1" [label="(128, 32, 3, 3)", style=dashed]; -"58 DequantizeLinear_features.5.expand3x3.weight_1" -> "59 /features/features.5/expand3x3/Conv" [label="(128, 32, 3, 3)", style=solid]; -"59 /features/features.5/expand3x3/Conv" -> "60 /features/features.5/expand3x3_activation/Relu" [label="(1, 128, 54, 54)", style=solid]; -"60 /features/features.5/expand3x3_activation/Relu" -> "61 QuantizeLinear_/features/features.5/expand3x3_activation/Relu_output_0_1" [label="(1, 128, 54, 54)", style=solid]; -"61 QuantizeLinear_/features/features.5/expand3x3_activation/Relu_output_0_1" -> "62 
DequantizeLinear_/features/features.5/expand3x3_activation/Relu_output_0_1" [label="(1, 128, 54, 54)", style=dashed]; -"62 DequantizeLinear_/features/features.5/expand3x3_activation/Relu_output_0_1" -> "65 /features/features.5/Concat" [label="(1, 128, 54, 54)", style=solid]; -"63 QuantizeLinear_/features/features.5/expand1x1_activation/Relu_output_0_1" -> "64 DequantizeLinear_/features/features.5/expand1x1_activation/Relu_output_0_1" [label="(1, 128, 54, 54)", style=dashed]; -"64 DequantizeLinear_/features/features.5/expand1x1_activation/Relu_output_0_1" -> "65 /features/features.5/Concat" [label="(1, 128, 54, 54)", style=solid]; -"65 /features/features.5/Concat" -> "66 /features/features.6/MaxPool" [label="(1, 256, 54, 54)", style=solid]; -"66 /features/features.6/MaxPool" -> "69 /features/features.7/squeeze/Conv" [label="(1, 256, 27, 27)", style=solid]; -"67 QuantizeLinear_features.7.squeeze.weight_1" -> "68 DequantizeLinear_features.7.squeeze.weight_1" [label="(32, 256, 1, 1)", style=dashed]; -"68 DequantizeLinear_features.7.squeeze.weight_1" -> "69 /features/features.7/squeeze/Conv" [label="(32, 256, 1, 1)", style=solid]; -"69 /features/features.7/squeeze/Conv" -> "70 /features/features.7/squeeze_activation/Relu" [label="(1, 32, 27, 27)", style=solid]; -"70 /features/features.7/squeeze_activation/Relu" -> "71 QuantizeLinear_/features/features.7/squeeze_activation/Relu_output_0_1" [label="(1, 32, 27, 27)", style=solid]; -"71 QuantizeLinear_/features/features.7/squeeze_activation/Relu_output_0_1" -> "72 DequantizeLinear_/features/features.7/squeeze_activation/Relu_output_0_1" [label="(1, 32, 27, 27)", style=dashed]; -"72 DequantizeLinear_/features/features.7/squeeze_activation/Relu_output_0_1" -> "75 /features/features.7/expand1x1/Conv" [label="(1, 32, 27, 27)", style=solid]; -"72 DequantizeLinear_/features/features.7/squeeze_activation/Relu_output_0_1" -> "79 /features/features.7/expand3x3/Conv" [label="(1, 32, 27, 27)", style=solid]; -"73 QuantizeLinear_features.7.expand1x1.weight_1" -> "74 DequantizeLinear_features.7.expand1x1.weight_1" [label="(128, 32, 1, 1)", style=dashed]; -"74 DequantizeLinear_features.7.expand1x1.weight_1" -> "75 /features/features.7/expand1x1/Conv" [label="(128, 32, 1, 1)", style=solid]; -"75 /features/features.7/expand1x1/Conv" -> "76 /features/features.7/expand1x1_activation/Relu" [label="(1, 128, 27, 27)", style=solid]; -"76 /features/features.7/expand1x1_activation/Relu" -> "83 QuantizeLinear_/features/features.7/expand1x1_activation/Relu_output_0_1" [label="(1, 128, 27, 27)", style=solid]; -"77 QuantizeLinear_features.7.expand3x3.weight_1" -> "78 DequantizeLinear_features.7.expand3x3.weight_1" [label="(128, 32, 3, 3)", style=dashed]; -"78 DequantizeLinear_features.7.expand3x3.weight_1" -> "79 /features/features.7/expand3x3/Conv" [label="(128, 32, 3, 3)", style=solid]; -"79 /features/features.7/expand3x3/Conv" -> "80 /features/features.7/expand3x3_activation/Relu" [label="(1, 128, 27, 27)", style=solid]; -"80 /features/features.7/expand3x3_activation/Relu" -> "81 QuantizeLinear_/features/features.7/expand3x3_activation/Relu_output_0_1" [label="(1, 128, 27, 27)", style=solid]; -"81 QuantizeLinear_/features/features.7/expand3x3_activation/Relu_output_0_1" -> "82 DequantizeLinear_/features/features.7/expand3x3_activation/Relu_output_0_1" [label="(1, 128, 27, 27)", style=dashed]; -"82 DequantizeLinear_/features/features.7/expand3x3_activation/Relu_output_0_1" -> "85 /features/features.7/Concat" [label="(1, 128, 27, 27)", style=solid]; -"83 
QuantizeLinear_/features/features.7/expand1x1_activation/Relu_output_0_1" -> "84 DequantizeLinear_/features/features.7/expand1x1_activation/Relu_output_0_1" [label="(1, 128, 27, 27)", style=dashed]; -"84 DequantizeLinear_/features/features.7/expand1x1_activation/Relu_output_0_1" -> "85 /features/features.7/Concat" [label="(1, 128, 27, 27)", style=solid]; -"85 /features/features.7/Concat" -> "88 /features/features.8/squeeze/Conv" [label="(1, 256, 27, 27)", style=solid]; -"86 QuantizeLinear_features.8.squeeze.weight_1" -> "87 DequantizeLinear_features.8.squeeze.weight_1" [label="(48, 256, 1, 1)", style=dashed]; -"87 DequantizeLinear_features.8.squeeze.weight_1" -> "88 /features/features.8/squeeze/Conv" [label="(48, 256, 1, 1)", style=solid]; -"88 /features/features.8/squeeze/Conv" -> "89 /features/features.8/squeeze_activation/Relu" [label="(1, 48, 27, 27)", style=solid]; -"89 /features/features.8/squeeze_activation/Relu" -> "90 QuantizeLinear_/features/features.8/squeeze_activation/Relu_output_0_1" [label="(1, 48, 27, 27)", style=solid]; -"90 QuantizeLinear_/features/features.8/squeeze_activation/Relu_output_0_1" -> "91 DequantizeLinear_/features/features.8/squeeze_activation/Relu_output_0_1" [label="(1, 48, 27, 27)", style=dashed]; -"91 DequantizeLinear_/features/features.8/squeeze_activation/Relu_output_0_1" -> "94 /features/features.8/expand1x1/Conv" [label="(1, 48, 27, 27)", style=solid]; -"91 DequantizeLinear_/features/features.8/squeeze_activation/Relu_output_0_1" -> "98 /features/features.8/expand3x3/Conv" [label="(1, 48, 27, 27)", style=solid]; -"92 QuantizeLinear_features.8.expand1x1.weight_1" -> "93 DequantizeLinear_features.8.expand1x1.weight_1" [label="(192, 48, 1, 1)", style=dashed]; -"93 DequantizeLinear_features.8.expand1x1.weight_1" -> "94 /features/features.8/expand1x1/Conv" [label="(192, 48, 1, 1)", style=solid]; -"94 /features/features.8/expand1x1/Conv" -> "95 /features/features.8/expand1x1_activation/Relu" [label="(1, 192, 27, 27)", style=solid]; -"95 /features/features.8/expand1x1_activation/Relu" -> "100 QuantizeLinear_/features/features.8/expand1x1_activation/Relu_output_0_1" [label="(1, 192, 27, 27)", style=solid]; -"96 QuantizeLinear_features.8.expand3x3.weight_1" -> "97 DequantizeLinear_features.8.expand3x3.weight_1" [label="(192, 48, 3, 3)", style=dashed]; -"97 DequantizeLinear_features.8.expand3x3.weight_1" -> "98 /features/features.8/expand3x3/Conv" [label="(192, 48, 3, 3)", style=solid]; -"98 /features/features.8/expand3x3/Conv" -> "99 /features/features.8/expand3x3_activation/Relu" [label="(1, 192, 27, 27)", style=solid]; -"99 /features/features.8/expand3x3_activation/Relu" -> "102 QuantizeLinear_/features/features.8/expand3x3_activation/Relu_output_0_1" [label="(1, 192, 27, 27)", style=solid]; -"100 QuantizeLinear_/features/features.8/expand1x1_activation/Relu_output_0_1" -> "101 DequantizeLinear_/features/features.8/expand1x1_activation/Relu_output_0_1" [label="(1, 192, 27, 27)", style=dashed]; -"101 DequantizeLinear_/features/features.8/expand1x1_activation/Relu_output_0_1" -> "104 /features/features.8/Concat" [label="(1, 192, 27, 27)", style=solid]; -"102 QuantizeLinear_/features/features.8/expand3x3_activation/Relu_output_0_1" -> "103 DequantizeLinear_/features/features.8/expand3x3_activation/Relu_output_0_1" [label="(1, 192, 27, 27)", style=dashed]; -"103 DequantizeLinear_/features/features.8/expand3x3_activation/Relu_output_0_1" -> "104 /features/features.8/Concat" [label="(1, 192, 27, 27)", style=solid]; -"104 /features/features.8/Concat" -> "107 
/features/features.9/squeeze/Conv" [label="(1, 384, 27, 27)", style=solid]; -"105 QuantizeLinear_features.9.squeeze.weight_1" -> "106 DequantizeLinear_features.9.squeeze.weight_1" [label="(48, 384, 1, 1)", style=dashed]; -"106 DequantizeLinear_features.9.squeeze.weight_1" -> "107 /features/features.9/squeeze/Conv" [label="(48, 384, 1, 1)", style=solid]; -"107 /features/features.9/squeeze/Conv" -> "108 /features/features.9/squeeze_activation/Relu" [label="(1, 48, 27, 27)", style=solid]; -"108 /features/features.9/squeeze_activation/Relu" -> "109 QuantizeLinear_/features/features.9/squeeze_activation/Relu_output_0_1" [label="(1, 48, 27, 27)", style=solid]; -"109 QuantizeLinear_/features/features.9/squeeze_activation/Relu_output_0_1" -> "110 DequantizeLinear_/features/features.9/squeeze_activation/Relu_output_0_1" [label="(1, 48, 27, 27)", style=dashed]; -"110 DequantizeLinear_/features/features.9/squeeze_activation/Relu_output_0_1" -> "113 /features/features.9/expand1x1/Conv" [label="(1, 48, 27, 27)", style=solid]; -"110 DequantizeLinear_/features/features.9/squeeze_activation/Relu_output_0_1" -> "117 /features/features.9/expand3x3/Conv" [label="(1, 48, 27, 27)", style=solid]; -"111 QuantizeLinear_features.9.expand1x1.weight_1" -> "112 DequantizeLinear_features.9.expand1x1.weight_1" [label="(192, 48, 1, 1)", style=dashed]; -"112 DequantizeLinear_features.9.expand1x1.weight_1" -> "113 /features/features.9/expand1x1/Conv" [label="(192, 48, 1, 1)", style=solid]; -"113 /features/features.9/expand1x1/Conv" -> "114 /features/features.9/expand1x1_activation/Relu" [label="(1, 192, 27, 27)", style=solid]; -"114 /features/features.9/expand1x1_activation/Relu" -> "119 QuantizeLinear_/features/features.9/expand1x1_activation/Relu_output_0_1" [label="(1, 192, 27, 27)", style=solid]; -"115 QuantizeLinear_features.9.expand3x3.weight_1" -> "116 DequantizeLinear_features.9.expand3x3.weight_1" [label="(192, 48, 3, 3)", style=dashed]; -"116 DequantizeLinear_features.9.expand3x3.weight_1" -> "117 /features/features.9/expand3x3/Conv" [label="(192, 48, 3, 3)", style=solid]; -"117 /features/features.9/expand3x3/Conv" -> "118 /features/features.9/expand3x3_activation/Relu" [label="(1, 192, 27, 27)", style=solid]; -"118 /features/features.9/expand3x3_activation/Relu" -> "121 QuantizeLinear_/features/features.9/expand3x3_activation/Relu_output_0_1" [label="(1, 192, 27, 27)", style=solid]; -"119 QuantizeLinear_/features/features.9/expand1x1_activation/Relu_output_0_1" -> "120 DequantizeLinear_/features/features.9/expand1x1_activation/Relu_output_0_1" [label="(1, 192, 27, 27)", style=dashed]; -"120 DequantizeLinear_/features/features.9/expand1x1_activation/Relu_output_0_1" -> "123 /features/features.9/Concat" [label="(1, 192, 27, 27)", style=solid]; -"121 QuantizeLinear_/features/features.9/expand3x3_activation/Relu_output_0_1" -> "122 DequantizeLinear_/features/features.9/expand3x3_activation/Relu_output_0_1" [label="(1, 192, 27, 27)", style=dashed]; -"122 DequantizeLinear_/features/features.9/expand3x3_activation/Relu_output_0_1" -> "123 /features/features.9/Concat" [label="(1, 192, 27, 27)", style=solid]; -"123 /features/features.9/Concat" -> "126 /features/features.10/squeeze/Conv" [label="(1, 384, 27, 27)", style=solid]; -"124 QuantizeLinear_features.10.squeeze.weight_1" -> "125 DequantizeLinear_features.10.squeeze.weight_1" [label="(64, 384, 1, 1)", style=dashed]; -"125 DequantizeLinear_features.10.squeeze.weight_1" -> "126 /features/features.10/squeeze/Conv" [label="(64, 384, 1, 1)", style=solid]; -"126 
/features/features.10/squeeze/Conv" -> "127 /features/features.10/squeeze_activation/Relu" [label="(1, 64, 27, 27)", style=solid]; -"127 /features/features.10/squeeze_activation/Relu" -> "128 QuantizeLinear_/features/features.10/squeeze_activation/Relu_output_0_1" [label="(1, 64, 27, 27)", style=solid]; -"128 QuantizeLinear_/features/features.10/squeeze_activation/Relu_output_0_1" -> "129 DequantizeLinear_/features/features.10/squeeze_activation/Relu_output_0_1" [label="(1, 64, 27, 27)", style=dashed]; -"129 DequantizeLinear_/features/features.10/squeeze_activation/Relu_output_0_1" -> "132 /features/features.10/expand1x1/Conv" [label="(1, 64, 27, 27)", style=solid]; -"129 DequantizeLinear_/features/features.10/squeeze_activation/Relu_output_0_1" -> "136 /features/features.10/expand3x3/Conv" [label="(1, 64, 27, 27)", style=solid]; -"130 QuantizeLinear_features.10.expand1x1.weight_1" -> "131 DequantizeLinear_features.10.expand1x1.weight_1" [label="(256, 64, 1, 1)", style=dashed]; -"131 DequantizeLinear_features.10.expand1x1.weight_1" -> "132 /features/features.10/expand1x1/Conv" [label="(256, 64, 1, 1)", style=solid]; -"132 /features/features.10/expand1x1/Conv" -> "133 /features/features.10/expand1x1_activation/Relu" [label="(1, 256, 27, 27)", style=solid]; -"133 /features/features.10/expand1x1_activation/Relu" -> "140 QuantizeLinear_/features/features.10/expand1x1_activation/Relu_output_0_1" [label="(1, 256, 27, 27)", style=solid]; -"134 QuantizeLinear_features.10.expand3x3.weight_1" -> "135 DequantizeLinear_features.10.expand3x3.weight_1" [label="(256, 64, 3, 3)", style=dashed]; -"135 DequantizeLinear_features.10.expand3x3.weight_1" -> "136 /features/features.10/expand3x3/Conv" [label="(256, 64, 3, 3)", style=solid]; -"136 /features/features.10/expand3x3/Conv" -> "137 /features/features.10/expand3x3_activation/Relu" [label="(1, 256, 27, 27)", style=solid]; -"137 /features/features.10/expand3x3_activation/Relu" -> "138 QuantizeLinear_/features/features.10/expand3x3_activation/Relu_output_0_1" [label="(1, 256, 27, 27)", style=solid]; -"138 QuantizeLinear_/features/features.10/expand3x3_activation/Relu_output_0_1" -> "139 DequantizeLinear_/features/features.10/expand3x3_activation/Relu_output_0_1" [label="(1, 256, 27, 27)", style=dashed]; -"139 DequantizeLinear_/features/features.10/expand3x3_activation/Relu_output_0_1" -> "142 /features/features.10/Concat" [label="(1, 256, 27, 27)", style=solid]; -"140 QuantizeLinear_/features/features.10/expand1x1_activation/Relu_output_0_1" -> "141 DequantizeLinear_/features/features.10/expand1x1_activation/Relu_output_0_1" [label="(1, 256, 27, 27)", style=dashed]; -"141 DequantizeLinear_/features/features.10/expand1x1_activation/Relu_output_0_1" -> "142 /features/features.10/Concat" [label="(1, 256, 27, 27)", style=solid]; -"142 /features/features.10/Concat" -> "143 /features/features.11/MaxPool" [label="(1, 512, 27, 27)", style=solid]; -"143 /features/features.11/MaxPool" -> "146 /features/features.12/squeeze/Conv" [label="(1, 512, 13, 13)", style=solid]; -"144 QuantizeLinear_features.12.squeeze.weight_1" -> "145 DequantizeLinear_features.12.squeeze.weight_1" [label="(64, 512, 1, 1)", style=dashed]; -"145 DequantizeLinear_features.12.squeeze.weight_1" -> "146 /features/features.12/squeeze/Conv" [label="(64, 512, 1, 1)", style=solid]; -"146 /features/features.12/squeeze/Conv" -> "147 /features/features.12/squeeze_activation/Relu" [label="(1, 64, 13, 13)", style=solid]; -"147 /features/features.12/squeeze_activation/Relu" -> "148 
QuantizeLinear_/features/features.12/squeeze_activation/Relu_output_0_1" [label="(1, 64, 13, 13)", style=solid]; -"148 QuantizeLinear_/features/features.12/squeeze_activation/Relu_output_0_1" -> "149 DequantizeLinear_/features/features.12/squeeze_activation/Relu_output_0_1" [label="(1, 64, 13, 13)", style=dashed]; -"149 DequantizeLinear_/features/features.12/squeeze_activation/Relu_output_0_1" -> "152 /features/features.12/expand1x1/Conv" [label="(1, 64, 13, 13)", style=solid]; -"149 DequantizeLinear_/features/features.12/squeeze_activation/Relu_output_0_1" -> "156 /features/features.12/expand3x3/Conv" [label="(1, 64, 13, 13)", style=solid]; -"150 QuantizeLinear_features.12.expand1x1.weight_1" -> "151 DequantizeLinear_features.12.expand1x1.weight_1" [label="(256, 64, 1, 1)", style=dashed]; -"151 DequantizeLinear_features.12.expand1x1.weight_1" -> "152 /features/features.12/expand1x1/Conv" [label="(256, 64, 1, 1)", style=solid]; -"152 /features/features.12/expand1x1/Conv" -> "153 /features/features.12/expand1x1_activation/Relu" [label="(1, 256, 13, 13)", style=solid]; -"153 /features/features.12/expand1x1_activation/Relu" -> "160 QuantizeLinear_/features/features.12/expand1x1_activation/Relu_output_0_1" [label="(1, 256, 13, 13)", style=solid]; -"154 QuantizeLinear_features.12.expand3x3.weight_1" -> "155 DequantizeLinear_features.12.expand3x3.weight_1" [label="(256, 64, 3, 3)", style=dashed]; -"155 DequantizeLinear_features.12.expand3x3.weight_1" -> "156 /features/features.12/expand3x3/Conv" [label="(256, 64, 3, 3)", style=solid]; -"156 /features/features.12/expand3x3/Conv" -> "157 /features/features.12/expand3x3_activation/Relu" [label="(1, 256, 13, 13)", style=solid]; -"157 /features/features.12/expand3x3_activation/Relu" -> "158 QuantizeLinear_/features/features.12/expand3x3_activation/Relu_output_0_1" [label="(1, 256, 13, 13)", style=solid]; -"158 QuantizeLinear_/features/features.12/expand3x3_activation/Relu_output_0_1" -> "159 DequantizeLinear_/features/features.12/expand3x3_activation/Relu_output_0_1" [label="(1, 256, 13, 13)", style=dashed]; -"159 DequantizeLinear_/features/features.12/expand3x3_activation/Relu_output_0_1" -> "162 /features/features.12/Concat" [label="(1, 256, 13, 13)", style=solid]; -"160 QuantizeLinear_/features/features.12/expand1x1_activation/Relu_output_0_1" -> "161 DequantizeLinear_/features/features.12/expand1x1_activation/Relu_output_0_1" [label="(1, 256, 13, 13)", style=dashed]; -"161 DequantizeLinear_/features/features.12/expand1x1_activation/Relu_output_0_1" -> "162 /features/features.12/Concat" [label="(1, 256, 13, 13)", style=solid]; -"162 /features/features.12/Concat" -> "165 /classifier/classifier.1/Conv" [label="(1, 512, 13, 13)", style=solid]; -"163 QuantizeLinear_classifier.1.weight_1" -> "164 DequantizeLinear_classifier.1.weight_1" [label="(1000, 512, 1, 1)", style=dashed]; -"164 DequantizeLinear_classifier.1.weight_1" -> "165 /classifier/classifier.1/Conv" [label="(1000, 512, 1, 1)", style=solid]; -"165 /classifier/classifier.1/Conv" -> "166 /classifier/classifier.2/Relu" [label="(1, 1000, 13, 13)", style=solid]; -"166 /classifier/classifier.2/Relu" -> "167 QuantizeLinear_/classifier/classifier.2/Relu_output_0_1" [label="(1, 1000, 13, 13)", style=solid]; -"167 QuantizeLinear_/classifier/classifier.2/Relu_output_0_1" -> "168 DequantizeLinear_/classifier/classifier.2/Relu_output_0_1" [label="(1, 1000, 13, 13)", style=dashed]; -"168 DequantizeLinear_/classifier/classifier.2/Relu_output_0_1" -> "169 /classifier/classifier.3/GlobalAveragePool" 
[label="(1, 1000, 13, 13)", style=solid]; -"169 /classifier/classifier.3/GlobalAveragePool" -> "170 /Flatten" [label="(1, 1000, 1, 1)", style=solid]; -"170 /Flatten" -> "172 nncf_model_output_0" [label="(1, 1000)", style=solid]; -"171 nncf_model_input_0" -> "0 QuantizeLinear_input.1_1" [label="(1, 3, 224, 224)", style=solid]; +"0 QuantizeLinear_input.1_1" -> "1 DequantizeLinear_input.1_1" [label="[1, 3, 224, 224]", style=dashed]; +"1 DequantizeLinear_input.1_1" -> "4 /features/features.0/Conv" [label="[1, 3, 224, 224]", style=solid]; +"2 QuantizeLinear_features.0.weight_1" -> "3 DequantizeLinear_features.0.weight_1" [label="[96, 3, 7, 7]", style=dashed]; +"3 DequantizeLinear_features.0.weight_1" -> "4 /features/features.0/Conv" [label="[96, 3, 7, 7]", style=solid]; +"4 /features/features.0/Conv" -> "5 /features/features.1/Relu" [label="[1, 96, 109, 109]", style=solid]; +"5 /features/features.1/Relu" -> "6 QuantizeLinear_/features/features.1/Relu_output_0_1" [label="[1, 96, 109, 109]", style=solid]; +"6 QuantizeLinear_/features/features.1/Relu_output_0_1" -> "7 DequantizeLinear_/features/features.1/Relu_output_0_1" [label="[1, 96, 109, 109]", style=dashed]; +"7 DequantizeLinear_/features/features.1/Relu_output_0_1" -> "8 /features/features.2/MaxPool" [label="[1, 96, 109, 109]", style=solid]; +"8 /features/features.2/MaxPool" -> "11 /features/features.3/squeeze/Conv" [label="[1, 96, 54, 54]", style=solid]; +"9 QuantizeLinear_features.3.squeeze.weight_1" -> "10 DequantizeLinear_features.3.squeeze.weight_1" [label="[16, 96, 1, 1]", style=dashed]; +"10 DequantizeLinear_features.3.squeeze.weight_1" -> "11 /features/features.3/squeeze/Conv" [label="[16, 96, 1, 1]", style=solid]; +"11 /features/features.3/squeeze/Conv" -> "12 /features/features.3/squeeze_activation/Relu" [label="[1, 16, 54, 54]", style=solid]; +"12 /features/features.3/squeeze_activation/Relu" -> "13 QuantizeLinear_/features/features.3/squeeze_activation/Relu_output_0_1" [label="[1, 16, 54, 54]", style=solid]; +"13 QuantizeLinear_/features/features.3/squeeze_activation/Relu_output_0_1" -> "14 DequantizeLinear_/features/features.3/squeeze_activation/Relu_output_0_1" [label="[1, 16, 54, 54]", style=dashed]; +"14 DequantizeLinear_/features/features.3/squeeze_activation/Relu_output_0_1" -> "17 /features/features.3/expand1x1/Conv" [label="[1, 16, 54, 54]", style=solid]; +"14 DequantizeLinear_/features/features.3/squeeze_activation/Relu_output_0_1" -> "21 /features/features.3/expand3x3/Conv" [label="[1, 16, 54, 54]", style=solid]; +"15 QuantizeLinear_features.3.expand1x1.weight_1" -> "16 DequantizeLinear_features.3.expand1x1.weight_1" [label="[64, 16, 1, 1]", style=dashed]; +"16 DequantizeLinear_features.3.expand1x1.weight_1" -> "17 /features/features.3/expand1x1/Conv" [label="[64, 16, 1, 1]", style=solid]; +"17 /features/features.3/expand1x1/Conv" -> "18 /features/features.3/expand1x1_activation/Relu" [label="[1, 64, 54, 54]", style=solid]; +"18 /features/features.3/expand1x1_activation/Relu" -> "23 QuantizeLinear_/features/features.3/expand1x1_activation/Relu_output_0_1" [label="[1, 64, 54, 54]", style=solid]; +"19 QuantizeLinear_features.3.expand3x3.weight_1" -> "20 DequantizeLinear_features.3.expand3x3.weight_1" [label="[64, 16, 3, 3]", style=dashed]; +"20 DequantizeLinear_features.3.expand3x3.weight_1" -> "21 /features/features.3/expand3x3/Conv" [label="[64, 16, 3, 3]", style=solid]; +"21 /features/features.3/expand3x3/Conv" -> "22 /features/features.3/expand3x3_activation/Relu" [label="[1, 64, 54, 54]", style=solid]; +"22 
/features/features.3/expand3x3_activation/Relu" -> "25 QuantizeLinear_/features/features.3/expand3x3_activation/Relu_output_0_1" [label="[1, 64, 54, 54]", style=solid]; +"23 QuantizeLinear_/features/features.3/expand1x1_activation/Relu_output_0_1" -> "24 DequantizeLinear_/features/features.3/expand1x1_activation/Relu_output_0_1" [label="[1, 64, 54, 54]", style=dashed]; +"24 DequantizeLinear_/features/features.3/expand1x1_activation/Relu_output_0_1" -> "27 /features/features.3/Concat" [label="[1, 64, 54, 54]", style=solid]; +"25 QuantizeLinear_/features/features.3/expand3x3_activation/Relu_output_0_1" -> "26 DequantizeLinear_/features/features.3/expand3x3_activation/Relu_output_0_1" [label="[1, 64, 54, 54]", style=dashed]; +"26 DequantizeLinear_/features/features.3/expand3x3_activation/Relu_output_0_1" -> "27 /features/features.3/Concat" [label="[1, 64, 54, 54]", style=solid]; +"27 /features/features.3/Concat" -> "30 /features/features.4/squeeze/Conv" [label="[1, 128, 54, 54]", style=solid]; +"28 QuantizeLinear_features.4.squeeze.weight_1" -> "29 DequantizeLinear_features.4.squeeze.weight_1" [label="[16, 128, 1, 1]", style=dashed]; +"29 DequantizeLinear_features.4.squeeze.weight_1" -> "30 /features/features.4/squeeze/Conv" [label="[16, 128, 1, 1]", style=solid]; +"30 /features/features.4/squeeze/Conv" -> "31 /features/features.4/squeeze_activation/Relu" [label="[1, 16, 54, 54]", style=solid]; +"31 /features/features.4/squeeze_activation/Relu" -> "32 QuantizeLinear_/features/features.4/squeeze_activation/Relu_output_0_1" [label="[1, 16, 54, 54]", style=solid]; +"32 QuantizeLinear_/features/features.4/squeeze_activation/Relu_output_0_1" -> "33 DequantizeLinear_/features/features.4/squeeze_activation/Relu_output_0_1" [label="[1, 16, 54, 54]", style=dashed]; +"33 DequantizeLinear_/features/features.4/squeeze_activation/Relu_output_0_1" -> "36 /features/features.4/expand1x1/Conv" [label="[1, 16, 54, 54]", style=solid]; +"33 DequantizeLinear_/features/features.4/squeeze_activation/Relu_output_0_1" -> "40 /features/features.4/expand3x3/Conv" [label="[1, 16, 54, 54]", style=solid]; +"34 QuantizeLinear_features.4.expand1x1.weight_1" -> "35 DequantizeLinear_features.4.expand1x1.weight_1" [label="[64, 16, 1, 1]", style=dashed]; +"35 DequantizeLinear_features.4.expand1x1.weight_1" -> "36 /features/features.4/expand1x1/Conv" [label="[64, 16, 1, 1]", style=solid]; +"36 /features/features.4/expand1x1/Conv" -> "37 /features/features.4/expand1x1_activation/Relu" [label="[1, 64, 54, 54]", style=solid]; +"37 /features/features.4/expand1x1_activation/Relu" -> "42 QuantizeLinear_/features/features.4/expand1x1_activation/Relu_output_0_1" [label="[1, 64, 54, 54]", style=solid]; +"38 QuantizeLinear_features.4.expand3x3.weight_1" -> "39 DequantizeLinear_features.4.expand3x3.weight_1" [label="[64, 16, 3, 3]", style=dashed]; +"39 DequantizeLinear_features.4.expand3x3.weight_1" -> "40 /features/features.4/expand3x3/Conv" [label="[64, 16, 3, 3]", style=solid]; +"40 /features/features.4/expand3x3/Conv" -> "41 /features/features.4/expand3x3_activation/Relu" [label="[1, 64, 54, 54]", style=solid]; +"41 /features/features.4/expand3x3_activation/Relu" -> "44 QuantizeLinear_/features/features.4/expand3x3_activation/Relu_output_0_1" [label="[1, 64, 54, 54]", style=solid]; +"42 QuantizeLinear_/features/features.4/expand1x1_activation/Relu_output_0_1" -> "43 DequantizeLinear_/features/features.4/expand1x1_activation/Relu_output_0_1" [label="[1, 64, 54, 54]", style=dashed]; +"43 
DequantizeLinear_/features/features.4/expand1x1_activation/Relu_output_0_1" -> "46 /features/features.4/Concat" [label="[1, 64, 54, 54]", style=solid]; +"44 QuantizeLinear_/features/features.4/expand3x3_activation/Relu_output_0_1" -> "45 DequantizeLinear_/features/features.4/expand3x3_activation/Relu_output_0_1" [label="[1, 64, 54, 54]", style=dashed]; +"45 DequantizeLinear_/features/features.4/expand3x3_activation/Relu_output_0_1" -> "46 /features/features.4/Concat" [label="[1, 64, 54, 54]", style=solid]; +"46 /features/features.4/Concat" -> "49 /features/features.5/squeeze/Conv" [label="[1, 128, 54, 54]", style=solid]; +"47 QuantizeLinear_features.5.squeeze.weight_1" -> "48 DequantizeLinear_features.5.squeeze.weight_1" [label="[32, 128, 1, 1]", style=dashed]; +"48 DequantizeLinear_features.5.squeeze.weight_1" -> "49 /features/features.5/squeeze/Conv" [label="[32, 128, 1, 1]", style=solid]; +"49 /features/features.5/squeeze/Conv" -> "50 /features/features.5/squeeze_activation/Relu" [label="[1, 32, 54, 54]", style=solid]; +"50 /features/features.5/squeeze_activation/Relu" -> "51 QuantizeLinear_/features/features.5/squeeze_activation/Relu_output_0_1" [label="[1, 32, 54, 54]", style=solid]; +"51 QuantizeLinear_/features/features.5/squeeze_activation/Relu_output_0_1" -> "52 DequantizeLinear_/features/features.5/squeeze_activation/Relu_output_0_1" [label="[1, 32, 54, 54]", style=dashed]; +"52 DequantizeLinear_/features/features.5/squeeze_activation/Relu_output_0_1" -> "55 /features/features.5/expand1x1/Conv" [label="[1, 32, 54, 54]", style=solid]; +"52 DequantizeLinear_/features/features.5/squeeze_activation/Relu_output_0_1" -> "59 /features/features.5/expand3x3/Conv" [label="[1, 32, 54, 54]", style=solid]; +"53 QuantizeLinear_features.5.expand1x1.weight_1" -> "54 DequantizeLinear_features.5.expand1x1.weight_1" [label="[128, 32, 1, 1]", style=dashed]; +"54 DequantizeLinear_features.5.expand1x1.weight_1" -> "55 /features/features.5/expand1x1/Conv" [label="[128, 32, 1, 1]", style=solid]; +"55 /features/features.5/expand1x1/Conv" -> "56 /features/features.5/expand1x1_activation/Relu" [label="[1, 128, 54, 54]", style=solid]; +"56 /features/features.5/expand1x1_activation/Relu" -> "63 QuantizeLinear_/features/features.5/expand1x1_activation/Relu_output_0_1" [label="[1, 128, 54, 54]", style=solid]; +"57 QuantizeLinear_features.5.expand3x3.weight_1" -> "58 DequantizeLinear_features.5.expand3x3.weight_1" [label="[128, 32, 3, 3]", style=dashed]; +"58 DequantizeLinear_features.5.expand3x3.weight_1" -> "59 /features/features.5/expand3x3/Conv" [label="[128, 32, 3, 3]", style=solid]; +"59 /features/features.5/expand3x3/Conv" -> "60 /features/features.5/expand3x3_activation/Relu" [label="[1, 128, 54, 54]", style=solid]; +"60 /features/features.5/expand3x3_activation/Relu" -> "61 QuantizeLinear_/features/features.5/expand3x3_activation/Relu_output_0_1" [label="[1, 128, 54, 54]", style=solid]; +"61 QuantizeLinear_/features/features.5/expand3x3_activation/Relu_output_0_1" -> "62 DequantizeLinear_/features/features.5/expand3x3_activation/Relu_output_0_1" [label="[1, 128, 54, 54]", style=dashed]; +"62 DequantizeLinear_/features/features.5/expand3x3_activation/Relu_output_0_1" -> "65 /features/features.5/Concat" [label="[1, 128, 54, 54]", style=solid]; +"63 QuantizeLinear_/features/features.5/expand1x1_activation/Relu_output_0_1" -> "64 DequantizeLinear_/features/features.5/expand1x1_activation/Relu_output_0_1" [label="[1, 128, 54, 54]", style=dashed]; +"64 
DequantizeLinear_/features/features.5/expand1x1_activation/Relu_output_0_1" -> "65 /features/features.5/Concat" [label="[1, 128, 54, 54]", style=solid]; +"65 /features/features.5/Concat" -> "66 /features/features.6/MaxPool" [label="[1, 256, 54, 54]", style=solid]; +"66 /features/features.6/MaxPool" -> "69 /features/features.7/squeeze/Conv" [label="[1, 256, 27, 27]", style=solid]; +"67 QuantizeLinear_features.7.squeeze.weight_1" -> "68 DequantizeLinear_features.7.squeeze.weight_1" [label="[32, 256, 1, 1]", style=dashed]; +"68 DequantizeLinear_features.7.squeeze.weight_1" -> "69 /features/features.7/squeeze/Conv" [label="[32, 256, 1, 1]", style=solid]; +"69 /features/features.7/squeeze/Conv" -> "70 /features/features.7/squeeze_activation/Relu" [label="[1, 32, 27, 27]", style=solid]; +"70 /features/features.7/squeeze_activation/Relu" -> "71 QuantizeLinear_/features/features.7/squeeze_activation/Relu_output_0_1" [label="[1, 32, 27, 27]", style=solid]; +"71 QuantizeLinear_/features/features.7/squeeze_activation/Relu_output_0_1" -> "72 DequantizeLinear_/features/features.7/squeeze_activation/Relu_output_0_1" [label="[1, 32, 27, 27]", style=dashed]; +"72 DequantizeLinear_/features/features.7/squeeze_activation/Relu_output_0_1" -> "75 /features/features.7/expand1x1/Conv" [label="[1, 32, 27, 27]", style=solid]; +"72 DequantizeLinear_/features/features.7/squeeze_activation/Relu_output_0_1" -> "79 /features/features.7/expand3x3/Conv" [label="[1, 32, 27, 27]", style=solid]; +"73 QuantizeLinear_features.7.expand1x1.weight_1" -> "74 DequantizeLinear_features.7.expand1x1.weight_1" [label="[128, 32, 1, 1]", style=dashed]; +"74 DequantizeLinear_features.7.expand1x1.weight_1" -> "75 /features/features.7/expand1x1/Conv" [label="[128, 32, 1, 1]", style=solid]; +"75 /features/features.7/expand1x1/Conv" -> "76 /features/features.7/expand1x1_activation/Relu" [label="[1, 128, 27, 27]", style=solid]; +"76 /features/features.7/expand1x1_activation/Relu" -> "83 QuantizeLinear_/features/features.7/expand1x1_activation/Relu_output_0_1" [label="[1, 128, 27, 27]", style=solid]; +"77 QuantizeLinear_features.7.expand3x3.weight_1" -> "78 DequantizeLinear_features.7.expand3x3.weight_1" [label="[128, 32, 3, 3]", style=dashed]; +"78 DequantizeLinear_features.7.expand3x3.weight_1" -> "79 /features/features.7/expand3x3/Conv" [label="[128, 32, 3, 3]", style=solid]; +"79 /features/features.7/expand3x3/Conv" -> "80 /features/features.7/expand3x3_activation/Relu" [label="[1, 128, 27, 27]", style=solid]; +"80 /features/features.7/expand3x3_activation/Relu" -> "81 QuantizeLinear_/features/features.7/expand3x3_activation/Relu_output_0_1" [label="[1, 128, 27, 27]", style=solid]; +"81 QuantizeLinear_/features/features.7/expand3x3_activation/Relu_output_0_1" -> "82 DequantizeLinear_/features/features.7/expand3x3_activation/Relu_output_0_1" [label="[1, 128, 27, 27]", style=dashed]; +"82 DequantizeLinear_/features/features.7/expand3x3_activation/Relu_output_0_1" -> "85 /features/features.7/Concat" [label="[1, 128, 27, 27]", style=solid]; +"83 QuantizeLinear_/features/features.7/expand1x1_activation/Relu_output_0_1" -> "84 DequantizeLinear_/features/features.7/expand1x1_activation/Relu_output_0_1" [label="[1, 128, 27, 27]", style=dashed]; +"84 DequantizeLinear_/features/features.7/expand1x1_activation/Relu_output_0_1" -> "85 /features/features.7/Concat" [label="[1, 128, 27, 27]", style=solid]; +"85 /features/features.7/Concat" -> "88 /features/features.8/squeeze/Conv" [label="[1, 256, 27, 27]", style=solid]; +"86 
QuantizeLinear_features.8.squeeze.weight_1" -> "87 DequantizeLinear_features.8.squeeze.weight_1" [label="[48, 256, 1, 1]", style=dashed]; +"87 DequantizeLinear_features.8.squeeze.weight_1" -> "88 /features/features.8/squeeze/Conv" [label="[48, 256, 1, 1]", style=solid]; +"88 /features/features.8/squeeze/Conv" -> "89 /features/features.8/squeeze_activation/Relu" [label="[1, 48, 27, 27]", style=solid]; +"89 /features/features.8/squeeze_activation/Relu" -> "90 QuantizeLinear_/features/features.8/squeeze_activation/Relu_output_0_1" [label="[1, 48, 27, 27]", style=solid]; +"90 QuantizeLinear_/features/features.8/squeeze_activation/Relu_output_0_1" -> "91 DequantizeLinear_/features/features.8/squeeze_activation/Relu_output_0_1" [label="[1, 48, 27, 27]", style=dashed]; +"91 DequantizeLinear_/features/features.8/squeeze_activation/Relu_output_0_1" -> "94 /features/features.8/expand1x1/Conv" [label="[1, 48, 27, 27]", style=solid]; +"91 DequantizeLinear_/features/features.8/squeeze_activation/Relu_output_0_1" -> "98 /features/features.8/expand3x3/Conv" [label="[1, 48, 27, 27]", style=solid]; +"92 QuantizeLinear_features.8.expand1x1.weight_1" -> "93 DequantizeLinear_features.8.expand1x1.weight_1" [label="[192, 48, 1, 1]", style=dashed]; +"93 DequantizeLinear_features.8.expand1x1.weight_1" -> "94 /features/features.8/expand1x1/Conv" [label="[192, 48, 1, 1]", style=solid]; +"94 /features/features.8/expand1x1/Conv" -> "95 /features/features.8/expand1x1_activation/Relu" [label="[1, 192, 27, 27]", style=solid]; +"95 /features/features.8/expand1x1_activation/Relu" -> "100 QuantizeLinear_/features/features.8/expand1x1_activation/Relu_output_0_1" [label="[1, 192, 27, 27]", style=solid]; +"96 QuantizeLinear_features.8.expand3x3.weight_1" -> "97 DequantizeLinear_features.8.expand3x3.weight_1" [label="[192, 48, 3, 3]", style=dashed]; +"97 DequantizeLinear_features.8.expand3x3.weight_1" -> "98 /features/features.8/expand3x3/Conv" [label="[192, 48, 3, 3]", style=solid]; +"98 /features/features.8/expand3x3/Conv" -> "99 /features/features.8/expand3x3_activation/Relu" [label="[1, 192, 27, 27]", style=solid]; +"99 /features/features.8/expand3x3_activation/Relu" -> "102 QuantizeLinear_/features/features.8/expand3x3_activation/Relu_output_0_1" [label="[1, 192, 27, 27]", style=solid]; +"100 QuantizeLinear_/features/features.8/expand1x1_activation/Relu_output_0_1" -> "101 DequantizeLinear_/features/features.8/expand1x1_activation/Relu_output_0_1" [label="[1, 192, 27, 27]", style=dashed]; +"101 DequantizeLinear_/features/features.8/expand1x1_activation/Relu_output_0_1" -> "104 /features/features.8/Concat" [label="[1, 192, 27, 27]", style=solid]; +"102 QuantizeLinear_/features/features.8/expand3x3_activation/Relu_output_0_1" -> "103 DequantizeLinear_/features/features.8/expand3x3_activation/Relu_output_0_1" [label="[1, 192, 27, 27]", style=dashed]; +"103 DequantizeLinear_/features/features.8/expand3x3_activation/Relu_output_0_1" -> "104 /features/features.8/Concat" [label="[1, 192, 27, 27]", style=solid]; +"104 /features/features.8/Concat" -> "107 /features/features.9/squeeze/Conv" [label="[1, 384, 27, 27]", style=solid]; +"105 QuantizeLinear_features.9.squeeze.weight_1" -> "106 DequantizeLinear_features.9.squeeze.weight_1" [label="[48, 384, 1, 1]", style=dashed]; +"106 DequantizeLinear_features.9.squeeze.weight_1" -> "107 /features/features.9/squeeze/Conv" [label="[48, 384, 1, 1]", style=solid]; +"107 /features/features.9/squeeze/Conv" -> "108 /features/features.9/squeeze_activation/Relu" [label="[1, 48, 27, 27]", 
style=solid]; +"108 /features/features.9/squeeze_activation/Relu" -> "109 QuantizeLinear_/features/features.9/squeeze_activation/Relu_output_0_1" [label="[1, 48, 27, 27]", style=solid]; +"109 QuantizeLinear_/features/features.9/squeeze_activation/Relu_output_0_1" -> "110 DequantizeLinear_/features/features.9/squeeze_activation/Relu_output_0_1" [label="[1, 48, 27, 27]", style=dashed]; +"110 DequantizeLinear_/features/features.9/squeeze_activation/Relu_output_0_1" -> "113 /features/features.9/expand1x1/Conv" [label="[1, 48, 27, 27]", style=solid]; +"110 DequantizeLinear_/features/features.9/squeeze_activation/Relu_output_0_1" -> "117 /features/features.9/expand3x3/Conv" [label="[1, 48, 27, 27]", style=solid]; +"111 QuantizeLinear_features.9.expand1x1.weight_1" -> "112 DequantizeLinear_features.9.expand1x1.weight_1" [label="[192, 48, 1, 1]", style=dashed]; +"112 DequantizeLinear_features.9.expand1x1.weight_1" -> "113 /features/features.9/expand1x1/Conv" [label="[192, 48, 1, 1]", style=solid]; +"113 /features/features.9/expand1x1/Conv" -> "114 /features/features.9/expand1x1_activation/Relu" [label="[1, 192, 27, 27]", style=solid]; +"114 /features/features.9/expand1x1_activation/Relu" -> "119 QuantizeLinear_/features/features.9/expand1x1_activation/Relu_output_0_1" [label="[1, 192, 27, 27]", style=solid]; +"115 QuantizeLinear_features.9.expand3x3.weight_1" -> "116 DequantizeLinear_features.9.expand3x3.weight_1" [label="[192, 48, 3, 3]", style=dashed]; +"116 DequantizeLinear_features.9.expand3x3.weight_1" -> "117 /features/features.9/expand3x3/Conv" [label="[192, 48, 3, 3]", style=solid]; +"117 /features/features.9/expand3x3/Conv" -> "118 /features/features.9/expand3x3_activation/Relu" [label="[1, 192, 27, 27]", style=solid]; +"118 /features/features.9/expand3x3_activation/Relu" -> "121 QuantizeLinear_/features/features.9/expand3x3_activation/Relu_output_0_1" [label="[1, 192, 27, 27]", style=solid]; +"119 QuantizeLinear_/features/features.9/expand1x1_activation/Relu_output_0_1" -> "120 DequantizeLinear_/features/features.9/expand1x1_activation/Relu_output_0_1" [label="[1, 192, 27, 27]", style=dashed]; +"120 DequantizeLinear_/features/features.9/expand1x1_activation/Relu_output_0_1" -> "123 /features/features.9/Concat" [label="[1, 192, 27, 27]", style=solid]; +"121 QuantizeLinear_/features/features.9/expand3x3_activation/Relu_output_0_1" -> "122 DequantizeLinear_/features/features.9/expand3x3_activation/Relu_output_0_1" [label="[1, 192, 27, 27]", style=dashed]; +"122 DequantizeLinear_/features/features.9/expand3x3_activation/Relu_output_0_1" -> "123 /features/features.9/Concat" [label="[1, 192, 27, 27]", style=solid]; +"123 /features/features.9/Concat" -> "126 /features/features.10/squeeze/Conv" [label="[1, 384, 27, 27]", style=solid]; +"124 QuantizeLinear_features.10.squeeze.weight_1" -> "125 DequantizeLinear_features.10.squeeze.weight_1" [label="[64, 384, 1, 1]", style=dashed]; +"125 DequantizeLinear_features.10.squeeze.weight_1" -> "126 /features/features.10/squeeze/Conv" [label="[64, 384, 1, 1]", style=solid]; +"126 /features/features.10/squeeze/Conv" -> "127 /features/features.10/squeeze_activation/Relu" [label="[1, 64, 27, 27]", style=solid]; +"127 /features/features.10/squeeze_activation/Relu" -> "128 QuantizeLinear_/features/features.10/squeeze_activation/Relu_output_0_1" [label="[1, 64, 27, 27]", style=solid]; +"128 QuantizeLinear_/features/features.10/squeeze_activation/Relu_output_0_1" -> "129 DequantizeLinear_/features/features.10/squeeze_activation/Relu_output_0_1" [label="[1, 64, 
27, 27]", style=dashed]; +"129 DequantizeLinear_/features/features.10/squeeze_activation/Relu_output_0_1" -> "132 /features/features.10/expand1x1/Conv" [label="[1, 64, 27, 27]", style=solid]; +"129 DequantizeLinear_/features/features.10/squeeze_activation/Relu_output_0_1" -> "136 /features/features.10/expand3x3/Conv" [label="[1, 64, 27, 27]", style=solid]; +"130 QuantizeLinear_features.10.expand1x1.weight_1" -> "131 DequantizeLinear_features.10.expand1x1.weight_1" [label="[256, 64, 1, 1]", style=dashed]; +"131 DequantizeLinear_features.10.expand1x1.weight_1" -> "132 /features/features.10/expand1x1/Conv" [label="[256, 64, 1, 1]", style=solid]; +"132 /features/features.10/expand1x1/Conv" -> "133 /features/features.10/expand1x1_activation/Relu" [label="[1, 256, 27, 27]", style=solid]; +"133 /features/features.10/expand1x1_activation/Relu" -> "140 QuantizeLinear_/features/features.10/expand1x1_activation/Relu_output_0_1" [label="[1, 256, 27, 27]", style=solid]; +"134 QuantizeLinear_features.10.expand3x3.weight_1" -> "135 DequantizeLinear_features.10.expand3x3.weight_1" [label="[256, 64, 3, 3]", style=dashed]; +"135 DequantizeLinear_features.10.expand3x3.weight_1" -> "136 /features/features.10/expand3x3/Conv" [label="[256, 64, 3, 3]", style=solid]; +"136 /features/features.10/expand3x3/Conv" -> "137 /features/features.10/expand3x3_activation/Relu" [label="[1, 256, 27, 27]", style=solid]; +"137 /features/features.10/expand3x3_activation/Relu" -> "138 QuantizeLinear_/features/features.10/expand3x3_activation/Relu_output_0_1" [label="[1, 256, 27, 27]", style=solid]; +"138 QuantizeLinear_/features/features.10/expand3x3_activation/Relu_output_0_1" -> "139 DequantizeLinear_/features/features.10/expand3x3_activation/Relu_output_0_1" [label="[1, 256, 27, 27]", style=dashed]; +"139 DequantizeLinear_/features/features.10/expand3x3_activation/Relu_output_0_1" -> "142 /features/features.10/Concat" [label="[1, 256, 27, 27]", style=solid]; +"140 QuantizeLinear_/features/features.10/expand1x1_activation/Relu_output_0_1" -> "141 DequantizeLinear_/features/features.10/expand1x1_activation/Relu_output_0_1" [label="[1, 256, 27, 27]", style=dashed]; +"141 DequantizeLinear_/features/features.10/expand1x1_activation/Relu_output_0_1" -> "142 /features/features.10/Concat" [label="[1, 256, 27, 27]", style=solid]; +"142 /features/features.10/Concat" -> "143 /features/features.11/MaxPool" [label="[1, 512, 27, 27]", style=solid]; +"143 /features/features.11/MaxPool" -> "146 /features/features.12/squeeze/Conv" [label="[1, 512, 13, 13]", style=solid]; +"144 QuantizeLinear_features.12.squeeze.weight_1" -> "145 DequantizeLinear_features.12.squeeze.weight_1" [label="[64, 512, 1, 1]", style=dashed]; +"145 DequantizeLinear_features.12.squeeze.weight_1" -> "146 /features/features.12/squeeze/Conv" [label="[64, 512, 1, 1]", style=solid]; +"146 /features/features.12/squeeze/Conv" -> "147 /features/features.12/squeeze_activation/Relu" [label="[1, 64, 13, 13]", style=solid]; +"147 /features/features.12/squeeze_activation/Relu" -> "148 QuantizeLinear_/features/features.12/squeeze_activation/Relu_output_0_1" [label="[1, 64, 13, 13]", style=solid]; +"148 QuantizeLinear_/features/features.12/squeeze_activation/Relu_output_0_1" -> "149 DequantizeLinear_/features/features.12/squeeze_activation/Relu_output_0_1" [label="[1, 64, 13, 13]", style=dashed]; +"149 DequantizeLinear_/features/features.12/squeeze_activation/Relu_output_0_1" -> "152 /features/features.12/expand1x1/Conv" [label="[1, 64, 13, 13]", style=solid]; +"149 
DequantizeLinear_/features/features.12/squeeze_activation/Relu_output_0_1" -> "156 /features/features.12/expand3x3/Conv" [label="[1, 64, 13, 13]", style=solid]; +"150 QuantizeLinear_features.12.expand1x1.weight_1" -> "151 DequantizeLinear_features.12.expand1x1.weight_1" [label="[256, 64, 1, 1]", style=dashed]; +"151 DequantizeLinear_features.12.expand1x1.weight_1" -> "152 /features/features.12/expand1x1/Conv" [label="[256, 64, 1, 1]", style=solid]; +"152 /features/features.12/expand1x1/Conv" -> "153 /features/features.12/expand1x1_activation/Relu" [label="[1, 256, 13, 13]", style=solid]; +"153 /features/features.12/expand1x1_activation/Relu" -> "160 QuantizeLinear_/features/features.12/expand1x1_activation/Relu_output_0_1" [label="[1, 256, 13, 13]", style=solid]; +"154 QuantizeLinear_features.12.expand3x3.weight_1" -> "155 DequantizeLinear_features.12.expand3x3.weight_1" [label="[256, 64, 3, 3]", style=dashed]; +"155 DequantizeLinear_features.12.expand3x3.weight_1" -> "156 /features/features.12/expand3x3/Conv" [label="[256, 64, 3, 3]", style=solid]; +"156 /features/features.12/expand3x3/Conv" -> "157 /features/features.12/expand3x3_activation/Relu" [label="[1, 256, 13, 13]", style=solid]; +"157 /features/features.12/expand3x3_activation/Relu" -> "158 QuantizeLinear_/features/features.12/expand3x3_activation/Relu_output_0_1" [label="[1, 256, 13, 13]", style=solid]; +"158 QuantizeLinear_/features/features.12/expand3x3_activation/Relu_output_0_1" -> "159 DequantizeLinear_/features/features.12/expand3x3_activation/Relu_output_0_1" [label="[1, 256, 13, 13]", style=dashed]; +"159 DequantizeLinear_/features/features.12/expand3x3_activation/Relu_output_0_1" -> "162 /features/features.12/Concat" [label="[1, 256, 13, 13]", style=solid]; +"160 QuantizeLinear_/features/features.12/expand1x1_activation/Relu_output_0_1" -> "161 DequantizeLinear_/features/features.12/expand1x1_activation/Relu_output_0_1" [label="[1, 256, 13, 13]", style=dashed]; +"161 DequantizeLinear_/features/features.12/expand1x1_activation/Relu_output_0_1" -> "162 /features/features.12/Concat" [label="[1, 256, 13, 13]", style=solid]; +"162 /features/features.12/Concat" -> "165 /classifier/classifier.1/Conv" [label="[1, 512, 13, 13]", style=solid]; +"163 QuantizeLinear_classifier.1.weight_1" -> "164 DequantizeLinear_classifier.1.weight_1" [label="[1000, 512, 1, 1]", style=dashed]; +"164 DequantizeLinear_classifier.1.weight_1" -> "165 /classifier/classifier.1/Conv" [label="[1000, 512, 1, 1]", style=solid]; +"165 /classifier/classifier.1/Conv" -> "166 /classifier/classifier.2/Relu" [label="[1, 1000, 13, 13]", style=solid]; +"166 /classifier/classifier.2/Relu" -> "167 QuantizeLinear_/classifier/classifier.2/Relu_output_0_1" [label="[1, 1000, 13, 13]", style=solid]; +"167 QuantizeLinear_/classifier/classifier.2/Relu_output_0_1" -> "168 DequantizeLinear_/classifier/classifier.2/Relu_output_0_1" [label="[1, 1000, 13, 13]", style=dashed]; +"168 DequantizeLinear_/classifier/classifier.2/Relu_output_0_1" -> "169 /classifier/classifier.3/GlobalAveragePool" [label="[1, 1000, 13, 13]", style=solid]; +"169 /classifier/classifier.3/GlobalAveragePool" -> "170 /Flatten" [label="[1, 1000, 1, 1]", style=solid]; +"170 /Flatten" -> "172 nncf_model_output_0" [label="[1, 1000]", style=solid]; +"171 nncf_model_input_0" -> "0 QuantizeLinear_input.1_1" [label="[1, 3, 224, 224]", style=solid]; } diff --git a/tests/onnx/data/reference_graphs/quantization/ssd-12.dot b/tests/onnx/data/reference_graphs/quantization/ssd-12.dot index 28bcdbfe886..5c1dbd0ede1 100644 
--- a/tests/onnx/data/reference_graphs/quantization/ssd-12.dot +++ b/tests/onnx/data/reference_graphs/quantization/ssd-12.dot @@ -308,424 +308,481 @@ strict digraph { "306 Gather_341" [id=306, type=Gather]; "307 Constant_342" [id=307, type=Constant]; "308 Constant_343" [id=308, type=Constant]; -"309 Unsqueeze_344" [id=309, type=Unsqueeze]; -"310 Unsqueeze_345" [id=310, type=Unsqueeze]; -"311 Unsqueeze_346" [id=311, type=Unsqueeze]; -"312 Concat_347" [id=312, type=Concat]; -"313 Reshape_348" [id=313, type=Reshape]; -"314 QuantizeLinear_backbone.conf.0.weight_1" [id=314, type=QuantizeLinear]; -"315 DequantizeLinear_backbone.conf.0.weight_1" [id=315, type=DequantizeLinear]; -"316 Conv_349" [id=316, type=Conv]; -"317 Constant_350" [id=317, type=Constant]; -"318 Shape_351" [id=318, type=Shape]; -"319 Gather_352" [id=319, type=Gather]; -"320 Constant_353" [id=320, type=Constant]; -"321 Constant_354" [id=321, type=Constant]; -"322 Unsqueeze_355" [id=322, type=Unsqueeze]; -"323 Unsqueeze_356" [id=323, type=Unsqueeze]; -"324 Unsqueeze_357" [id=324, type=Unsqueeze]; -"325 Concat_358" [id=325, type=Concat]; -"326 Reshape_359" [id=326, type=Reshape]; -"327 QuantizeLinear_backbone.loc.1.weight_1" [id=327, type=QuantizeLinear]; -"328 DequantizeLinear_backbone.loc.1.weight_1" [id=328, type=DequantizeLinear]; -"329 Conv_360" [id=329, type=Conv]; -"330 Constant_361" [id=330, type=Constant]; -"331 Shape_362" [id=331, type=Shape]; -"332 Gather_363" [id=332, type=Gather]; -"333 Constant_364" [id=333, type=Constant]; -"334 Constant_365" [id=334, type=Constant]; -"335 Unsqueeze_366" [id=335, type=Unsqueeze]; -"336 Unsqueeze_367" [id=336, type=Unsqueeze]; -"337 Unsqueeze_368" [id=337, type=Unsqueeze]; -"338 Concat_369" [id=338, type=Concat]; -"339 Reshape_370" [id=339, type=Reshape]; -"340 QuantizeLinear_backbone.conf.1.weight_1" [id=340, type=QuantizeLinear]; -"341 DequantizeLinear_backbone.conf.1.weight_1" [id=341, type=DequantizeLinear]; -"342 Conv_371" [id=342, type=Conv]; -"343 Constant_372" [id=343, type=Constant]; -"344 Shape_373" [id=344, type=Shape]; -"345 Gather_374" [id=345, type=Gather]; -"346 Constant_375" [id=346, type=Constant]; -"347 Constant_376" [id=347, type=Constant]; -"348 Unsqueeze_377" [id=348, type=Unsqueeze]; -"349 Unsqueeze_378" [id=349, type=Unsqueeze]; -"350 Unsqueeze_379" [id=350, type=Unsqueeze]; -"351 Concat_380" [id=351, type=Concat]; -"352 Reshape_381" [id=352, type=Reshape]; -"353 QuantizeLinear_backbone.loc.2.weight_1" [id=353, type=QuantizeLinear]; -"354 DequantizeLinear_backbone.loc.2.weight_1" [id=354, type=DequantizeLinear]; -"355 Conv_382" [id=355, type=Conv]; -"356 Constant_383" [id=356, type=Constant]; -"357 Shape_384" [id=357, type=Shape]; -"358 Gather_385" [id=358, type=Gather]; -"359 Constant_386" [id=359, type=Constant]; -"360 Constant_387" [id=360, type=Constant]; -"361 Unsqueeze_388" [id=361, type=Unsqueeze]; -"362 Unsqueeze_389" [id=362, type=Unsqueeze]; -"363 Unsqueeze_390" [id=363, type=Unsqueeze]; -"364 Concat_391" [id=364, type=Concat]; -"365 Reshape_392" [id=365, type=Reshape]; -"366 QuantizeLinear_backbone.conf.2.weight_1" [id=366, type=QuantizeLinear]; -"367 DequantizeLinear_backbone.conf.2.weight_1" [id=367, type=DequantizeLinear]; -"368 Conv_393" [id=368, type=Conv]; -"369 Constant_394" [id=369, type=Constant]; -"370 Shape_395" [id=370, type=Shape]; -"371 Gather_396" [id=371, type=Gather]; -"372 Constant_397" [id=372, type=Constant]; -"373 Constant_398" [id=373, type=Constant]; -"374 Unsqueeze_399" [id=374, type=Unsqueeze]; -"375 Unsqueeze_400" [id=375, 
type=Unsqueeze]; -"376 Unsqueeze_401" [id=376, type=Unsqueeze]; -"377 Concat_402" [id=377, type=Concat]; -"378 Reshape_403" [id=378, type=Reshape]; -"379 QuantizeLinear_backbone.loc.3.weight_1" [id=379, type=QuantizeLinear]; -"380 DequantizeLinear_backbone.loc.3.weight_1" [id=380, type=DequantizeLinear]; -"381 Conv_404" [id=381, type=Conv]; -"382 Constant_405" [id=382, type=Constant]; -"383 Shape_406" [id=383, type=Shape]; -"384 Gather_407" [id=384, type=Gather]; -"385 Constant_408" [id=385, type=Constant]; -"386 Constant_409" [id=386, type=Constant]; -"387 Unsqueeze_410" [id=387, type=Unsqueeze]; -"388 Unsqueeze_411" [id=388, type=Unsqueeze]; -"389 Unsqueeze_412" [id=389, type=Unsqueeze]; -"390 Concat_413" [id=390, type=Concat]; -"391 Reshape_414" [id=391, type=Reshape]; -"392 QuantizeLinear_backbone.conf.3.weight_1" [id=392, type=QuantizeLinear]; -"393 DequantizeLinear_backbone.conf.3.weight_1" [id=393, type=DequantizeLinear]; -"394 Conv_415" [id=394, type=Conv]; -"395 Constant_416" [id=395, type=Constant]; -"396 Shape_417" [id=396, type=Shape]; -"397 Gather_418" [id=397, type=Gather]; -"398 Constant_419" [id=398, type=Constant]; -"399 Constant_420" [id=399, type=Constant]; -"400 Unsqueeze_421" [id=400, type=Unsqueeze]; -"401 Unsqueeze_422" [id=401, type=Unsqueeze]; -"402 Unsqueeze_423" [id=402, type=Unsqueeze]; -"403 Concat_424" [id=403, type=Concat]; -"404 Reshape_425" [id=404, type=Reshape]; -"405 QuantizeLinear_backbone.loc.4.weight_1" [id=405, type=QuantizeLinear]; -"406 DequantizeLinear_backbone.loc.4.weight_1" [id=406, type=DequantizeLinear]; -"407 Conv_426" [id=407, type=Conv]; -"408 Constant_427" [id=408, type=Constant]; -"409 Shape_428" [id=409, type=Shape]; -"410 Gather_429" [id=410, type=Gather]; -"411 Constant_430" [id=411, type=Constant]; -"412 Constant_431" [id=412, type=Constant]; -"413 Unsqueeze_432" [id=413, type=Unsqueeze]; -"414 Unsqueeze_433" [id=414, type=Unsqueeze]; -"415 Unsqueeze_434" [id=415, type=Unsqueeze]; -"416 Concat_435" [id=416, type=Concat]; -"417 Reshape_436" [id=417, type=Reshape]; -"418 QuantizeLinear_backbone.conf.4.weight_1" [id=418, type=QuantizeLinear]; -"419 DequantizeLinear_backbone.conf.4.weight_1" [id=419, type=DequantizeLinear]; -"420 Conv_437" [id=420, type=Conv]; -"421 Constant_438" [id=421, type=Constant]; -"422 Shape_439" [id=422, type=Shape]; -"423 Gather_440" [id=423, type=Gather]; -"424 Constant_441" [id=424, type=Constant]; -"425 Constant_442" [id=425, type=Constant]; -"426 Unsqueeze_443" [id=426, type=Unsqueeze]; -"427 Unsqueeze_444" [id=427, type=Unsqueeze]; -"428 Unsqueeze_445" [id=428, type=Unsqueeze]; -"429 Concat_446" [id=429, type=Concat]; -"430 Reshape_447" [id=430, type=Reshape]; -"431 QuantizeLinear_Relu_337_1" [id=431, type=QuantizeLinear]; -"432 DequantizeLinear_Relu_337_1" [id=432, type=DequantizeLinear]; -"433 QuantizeLinear_backbone.loc.5.weight_1" [id=433, type=QuantizeLinear]; -"434 DequantizeLinear_backbone.loc.5.weight_1" [id=434, type=DequantizeLinear]; -"435 Conv_448" [id=435, type=Conv]; -"436 Constant_449" [id=436, type=Constant]; -"437 Shape_450" [id=437, type=Shape]; -"438 Gather_451" [id=438, type=Gather]; -"439 Constant_452" [id=439, type=Constant]; -"440 Constant_453" [id=440, type=Constant]; -"441 Unsqueeze_454" [id=441, type=Unsqueeze]; -"442 Unsqueeze_455" [id=442, type=Unsqueeze]; -"443 Unsqueeze_456" [id=443, type=Unsqueeze]; -"444 Concat_457" [id=444, type=Concat]; -"445 Reshape_458" [id=445, type=Reshape]; -"446 QuantizeLinear_backbone.conf.5.weight_1" [id=446, type=QuantizeLinear]; -"447 
DequantizeLinear_backbone.conf.5.weight_1" [id=447, type=DequantizeLinear]; -"448 Conv_459" [id=448, type=Conv]; -"449 Constant_460" [id=449, type=Constant]; -"450 Shape_461" [id=450, type=Shape]; -"451 Gather_462" [id=451, type=Gather]; -"452 Constant_463" [id=452, type=Constant]; -"453 Constant_464" [id=453, type=Constant]; -"454 Unsqueeze_465" [id=454, type=Unsqueeze]; -"455 Unsqueeze_466" [id=455, type=Unsqueeze]; -"456 Unsqueeze_467" [id=456, type=Unsqueeze]; -"457 Concat_468" [id=457, type=Concat]; -"458 Reshape_469" [id=458, type=Reshape]; -"459 Concat_470" [id=459, type=Concat]; -"460 Concat_471" [id=460, type=Concat]; -"461 Transpose_472" [id=461, type=Transpose]; -"462 Transpose_473" [id=462, type=Transpose]; -"463 Constant_474" [id=463, type=Constant]; -"464 Constant_475" [id=464, type=Constant]; -"465 Constant_476" [id=465, type=Constant]; -"466 Constant_477" [id=466, type=Constant]; -"467 Slice_478" [id=467, type=Slice]; -"468 Constant_479" [id=468, type=Constant]; -"469 Constant_480" [id=469, type=Constant]; -"470 Constant_481" [id=470, type=Constant]; -"471 Constant_482" [id=471, type=Constant]; -"472 Slice_483" [id=472, type=Slice]; -"473 Constant_484" [id=473, type=Constant]; -"474 Constant_485" [id=474, type=Constant]; -"475 Constant_486" [id=475, type=Constant]; -"476 Constant_487" [id=476, type=Constant]; -"477 Slice_488" [id=477, type=Slice]; -"478 Constant_489" [id=478, type=Constant]; -"479 Mul_490" [id=479, type=Mul]; -"480 Constant_491" [id=480, type=Constant]; -"481 Constant_492" [id=481, type=Constant]; -"482 Constant_493" [id=482, type=Constant]; -"483 Constant_494" [id=483, type=Constant]; -"484 Slice_495" [id=484, type=Slice]; -"485 Constant_496" [id=485, type=Constant]; -"486 Constant_497" [id=486, type=Constant]; -"487 Constant_498" [id=487, type=Constant]; -"488 Constant_499" [id=488, type=Constant]; -"489 Slice_500" [id=489, type=Slice]; -"490 Constant_501" [id=490, type=Constant]; -"491 Constant_502" [id=491, type=Constant]; -"492 Constant_503" [id=492, type=Constant]; -"493 Constant_504" [id=493, type=Constant]; -"494 Slice_505" [id=494, type=Slice]; -"495 Constant_506" [id=495, type=Constant]; -"496 Mul_507" [id=496, type=Mul]; -"497 Constant_508" [id=497, type=Constant]; -"498 Mul_509" [id=498, type=Mul]; -"499 Constant_510" [id=499, type=Constant]; -"500 Add_511" [id=500, type=Add]; -"501 Exp_512" [id=501, type=Exp]; -"502 Constant_513" [id=502, type=Constant]; -"503 Mul_514" [id=503, type=Mul]; -"504 Constant_515" [id=504, type=Constant]; -"505 Constant_516" [id=505, type=Constant]; -"506 Constant_517" [id=506, type=Constant]; -"507 Constant_518" [id=507, type=Constant]; -"508 Slice_519" [id=508, type=Slice]; -"509 Constant_520" [id=509, type=Constant]; -"510 Constant_521" [id=510, type=Constant]; -"511 Constant_522" [id=511, type=Constant]; -"512 Constant_523" [id=512, type=Constant]; -"513 Slice_524" [id=513, type=Slice]; -"514 Constant_525" [id=514, type=Constant]; -"515 Constant_526" [id=515, type=Constant]; -"516 Constant_527" [id=516, type=Constant]; -"517 Constant_528" [id=517, type=Constant]; -"518 Slice_529" [id=518, type=Slice]; -"519 Squeeze_530" [id=519, type=Squeeze]; -"520 Constant_531" [id=520, type=Constant]; -"521 Constant_532" [id=521, type=Constant]; -"522 Constant_533" [id=522, type=Constant]; -"523 Constant_534" [id=523, type=Constant]; -"524 Slice_535" [id=524, type=Slice]; -"525 Constant_536" [id=525, type=Constant]; -"526 Constant_537" [id=526, type=Constant]; -"527 Constant_538" [id=527, type=Constant]; -"528 Constant_539" 
[id=528, type=Constant]; -"529 Slice_540" [id=529, type=Slice]; -"530 Constant_541" [id=530, type=Constant]; -"531 Constant_542" [id=531, type=Constant]; -"532 Constant_543" [id=532, type=Constant]; -"533 Constant_544" [id=533, type=Constant]; -"534 Slice_545" [id=534, type=Slice]; -"535 Squeeze_546" [id=535, type=Squeeze]; -"536 Constant_547" [id=536, type=Constant]; -"537 Mul_548" [id=537, type=Mul]; -"538 Sub_549" [id=538, type=Sub]; -"539 Constant_550" [id=539, type=Constant]; -"540 Constant_551" [id=540, type=Constant]; -"541 Constant_552" [id=541, type=Constant]; -"542 Constant_553" [id=542, type=Constant]; -"543 Slice_554" [id=543, type=Slice]; -"544 Constant_555" [id=544, type=Constant]; -"545 Constant_556" [id=545, type=Constant]; -"546 Constant_557" [id=546, type=Constant]; -"547 Constant_558" [id=547, type=Constant]; -"548 Slice_559" [id=548, type=Slice]; -"549 Constant_560" [id=549, type=Constant]; -"550 Constant_561" [id=550, type=Constant]; -"551 Constant_562" [id=551, type=Constant]; -"552 Constant_563" [id=552, type=Constant]; -"553 Slice_564" [id=553, type=Slice]; -"554 Squeeze_565" [id=554, type=Squeeze]; -"555 Constant_566" [id=555, type=Constant]; -"556 Constant_567" [id=556, type=Constant]; -"557 Constant_568" [id=557, type=Constant]; -"558 Constant_569" [id=558, type=Constant]; -"559 Slice_570" [id=559, type=Slice]; -"560 Constant_571" [id=560, type=Constant]; -"561 Constant_572" [id=561, type=Constant]; -"562 Constant_573" [id=562, type=Constant]; -"563 Constant_574" [id=563, type=Constant]; -"564 Slice_575" [id=564, type=Slice]; -"565 Constant_576" [id=565, type=Constant]; -"566 Constant_577" [id=566, type=Constant]; -"567 Constant_578" [id=567, type=Constant]; -"568 Constant_579" [id=568, type=Constant]; -"569 Slice_580" [id=569, type=Slice]; -"570 Squeeze_581" [id=570, type=Squeeze]; -"571 Constant_582" [id=571, type=Constant]; -"572 Mul_583" [id=572, type=Mul]; -"573 Sub_584" [id=573, type=Sub]; -"574 Constant_585" [id=574, type=Constant]; -"575 Constant_586" [id=575, type=Constant]; -"576 Constant_587" [id=576, type=Constant]; -"577 Constant_588" [id=577, type=Constant]; -"578 Slice_589" [id=578, type=Slice]; -"579 Constant_590" [id=579, type=Constant]; -"580 Constant_591" [id=580, type=Constant]; -"581 Constant_592" [id=581, type=Constant]; -"582 Constant_593" [id=582, type=Constant]; -"583 Slice_594" [id=583, type=Slice]; -"584 Constant_595" [id=584, type=Constant]; -"585 Constant_596" [id=585, type=Constant]; -"586 Constant_597" [id=586, type=Constant]; -"587 Constant_598" [id=587, type=Constant]; -"588 Slice_599" [id=588, type=Slice]; -"589 Squeeze_600" [id=589, type=Squeeze]; -"590 Constant_601" [id=590, type=Constant]; -"591 Constant_602" [id=591, type=Constant]; -"592 Constant_603" [id=592, type=Constant]; -"593 Constant_604" [id=593, type=Constant]; -"594 Slice_605" [id=594, type=Slice]; -"595 Constant_606" [id=595, type=Constant]; -"596 Constant_607" [id=596, type=Constant]; -"597 Constant_608" [id=597, type=Constant]; -"598 Constant_609" [id=598, type=Constant]; -"599 Slice_610" [id=599, type=Slice]; -"600 Constant_611" [id=600, type=Constant]; -"601 Constant_612" [id=601, type=Constant]; -"602 Constant_613" [id=602, type=Constant]; -"603 Constant_614" [id=603, type=Constant]; -"604 Slice_615" [id=604, type=Slice]; -"605 Squeeze_616" [id=605, type=Squeeze]; -"606 Constant_617" [id=606, type=Constant]; -"607 Mul_618" [id=607, type=Mul]; -"608 Add_619" [id=608, type=Add]; -"609 Constant_620" [id=609, type=Constant]; -"610 Constant_621" [id=610, 
type=Constant]; -"611 Constant_622" [id=611, type=Constant]; -"612 Constant_623" [id=612, type=Constant]; -"613 Slice_624" [id=613, type=Slice]; -"614 Constant_625" [id=614, type=Constant]; -"615 Constant_626" [id=615, type=Constant]; -"616 Constant_627" [id=616, type=Constant]; -"617 Constant_628" [id=617, type=Constant]; -"618 Slice_629" [id=618, type=Slice]; -"619 Constant_630" [id=619, type=Constant]; -"620 Constant_631" [id=620, type=Constant]; -"621 Constant_632" [id=621, type=Constant]; -"622 Constant_633" [id=622, type=Constant]; -"623 Slice_634" [id=623, type=Slice]; -"624 Squeeze_635" [id=624, type=Squeeze]; -"625 Constant_636" [id=625, type=Constant]; -"626 Constant_637" [id=626, type=Constant]; -"627 Constant_638" [id=627, type=Constant]; -"628 Constant_639" [id=628, type=Constant]; -"629 Slice_640" [id=629, type=Slice]; -"630 Constant_641" [id=630, type=Constant]; -"631 Constant_642" [id=631, type=Constant]; -"632 Constant_643" [id=632, type=Constant]; -"633 Constant_644" [id=633, type=Constant]; -"634 Slice_645" [id=634, type=Slice]; -"635 Constant_646" [id=635, type=Constant]; -"636 Constant_647" [id=636, type=Constant]; -"637 Constant_648" [id=637, type=Constant]; -"638 Constant_649" [id=638, type=Constant]; -"639 Slice_650" [id=639, type=Slice]; -"640 Squeeze_651" [id=640, type=Squeeze]; -"641 Constant_652" [id=641, type=Constant]; -"642 Mul_653" [id=642, type=Mul]; -"643 Add_654" [id=643, type=Add]; -"644 Unsqueeze_655" [id=644, type=Unsqueeze]; -"645 Unsqueeze_656" [id=645, type=Unsqueeze]; -"646 Unsqueeze_657" [id=646, type=Unsqueeze]; -"647 Unsqueeze_658" [id=647, type=Unsqueeze]; -"648 Concat_659" [id=648, type=Concat]; -"649 Softmax_660" [id=649, type=Softmax]; -"650 Transpose_661" [id=650, type=Transpose]; -"651 Constant_662" [id=651, type=Constant]; -"652 Constant_663" [id=652, type=Constant]; -"653 Constant_664" [id=653, type=Constant]; -"654 Constant_665" [id=654, type=Constant]; -"655 Slice_666" [id=655, type=Slice]; -"656 Constant_667" [id=656, type=Constant]; -"657 Constant_668" [id=657, type=Constant]; -"658 Constant_669" [id=658, type=Constant]; -"659 Constant_670" [id=659, type=Constant]; -"660 Slice_671" [id=660, type=Slice]; -"661 Constant_672" [id=661, type=Constant]; -"662 Constant_673" [id=662, type=Constant]; -"663 Constant_674" [id=663, type=Constant]; -"664 Constant_675" [id=664, type=Constant]; -"665 Slice_676" [id=665, type=Slice]; -"666 Constant_677" [id=666, type=Constant]; -"667 ConstantOfShape_678" [id=667, type=ConstantOfShape]; -"668 Constant_679" [id=668, type=Constant]; -"669 ConstantOfShape_680" [id=669, type=ConstantOfShape]; -"670 Constant_681" [id=670, type=Constant]; -"671 ConstantOfShape_682" [id=671, type=ConstantOfShape]; -"672 NonMaxSuppression_683" [id=672, type=NonMaxSuppression]; -"673 Constant_684" [id=673, type=Constant]; -"674 Constant_685" [id=674, type=Constant]; -"675 Constant_686" [id=675, type=Constant]; -"676 Constant_687" [id=676, type=Constant]; -"677 Slice_688" [id=677, type=Slice]; -"678 Constant_689" [id=678, type=Constant]; -"679 Gather_690" [id=679, type=Gather]; -"680 Constant_691" [id=680, type=Constant]; -"681 Constant_692" [id=681, type=Constant]; -"682 Constant_693" [id=682, type=Constant]; -"683 Constant_694" [id=683, type=Constant]; -"684 Slice_695" [id=684, type=Slice]; -"685 Constant_696" [id=685, type=Constant]; -"686 Gather_697" [id=686, type=Gather]; -"687 Constant_698" [id=687, type=Constant]; -"688 Reshape_699" [id=688, type=Reshape]; -"689 Shape_700" [id=689, type=Shape]; -"690 Constant_701" 
[id=690, type=Constant]; -"691 Gather_702" [id=691, type=Gather]; -"692 Mul_703" [id=692, type=Mul]; -"693 Add_704" [id=693, type=Add]; -"694 Cast_705" [id=694, type=Cast]; -"695 Gather_706" [id=695, type=Gather]; -"696 Shape_707" [id=696, type=Shape]; -"697 Constant_708" [id=697, type=Constant]; -"698 Gather_709" [id=698, type=Gather]; -"699 Unsqueeze_710" [id=699, type=Unsqueeze]; -"700 Constant_711" [id=700, type=Constant]; -"701 Concat_712" [id=701, type=Concat]; -"702 Cast_713" [id=702, type=Cast]; -"703 ReduceMin_714" [id=703, type=ReduceMin]; -"704 Cast_715" [id=704, type=Cast]; -"705 Unsqueeze_716" [id=705, type=Unsqueeze]; -"706 TopK_717" [id=706, type=TopK]; -"707 Squeeze_719" [id=707, type=Squeeze]; -"708 Gather_720" [id=708, type=Gather]; -"709 Constant_721" [id=709, type=Constant]; -"710 Constant_722" [id=710, type=Constant]; -"711 Constant_723" [id=711, type=Constant]; -"712 Constant_724" [id=712, type=Constant]; -"713 Slice_725" [id=713, type=Slice]; -"714 Cast_726" [id=714, type=Cast]; -"715 Gather_727" [id=715, type=Gather]; -"716 Unsqueeze_bboxes" [id=716, type=Unsqueeze]; -"717 Gather_729" [id=717, type=Gather]; -"718 Unsqueeze_730" [id=718, type=Unsqueeze]; -"719 Constant_731" [id=719, type=Constant]; -"720 Add_labels" [id=720, type=Add]; -"721 Gather_733" [id=721, type=Gather]; -"722 Unsqueeze_scores" [id=722, type=Unsqueeze]; -"723 nncf_model_input_0" [id=723, type=nncf_model_input]; -"724 nncf_model_output_0" [id=724, type=nncf_model_output]; -"725 nncf_model_output_1" [id=725, type=nncf_model_output]; -"726 nncf_model_output_2" [id=726, type=nncf_model_output]; +"309 Constant_nncf_125" [id=309, type=Constant]; +"310 Unsqueeze_344" [id=310, type=Unsqueeze]; +"311 Constant_nncf_127" [id=311, type=Constant]; +"312 Unsqueeze_345" [id=312, type=Unsqueeze]; +"313 Constant_nncf_129" [id=313, type=Constant]; +"314 Unsqueeze_346" [id=314, type=Unsqueeze]; +"315 Concat_347" [id=315, type=Concat]; +"316 Reshape_348" [id=316, type=Reshape]; +"317 QuantizeLinear_backbone.conf.0.weight_1" [id=317, type=QuantizeLinear]; +"318 DequantizeLinear_backbone.conf.0.weight_1" [id=318, type=DequantizeLinear]; +"319 Conv_349" [id=319, type=Conv]; +"320 Constant_350" [id=320, type=Constant]; +"321 Shape_351" [id=321, type=Shape]; +"322 Gather_352" [id=322, type=Gather]; +"323 Constant_353" [id=323, type=Constant]; +"324 Constant_354" [id=324, type=Constant]; +"325 Constant_nncf_139" [id=325, type=Constant]; +"326 Unsqueeze_355" [id=326, type=Unsqueeze]; +"327 Constant_nncf_141" [id=327, type=Constant]; +"328 Unsqueeze_356" [id=328, type=Unsqueeze]; +"329 Constant_nncf_143" [id=329, type=Constant]; +"330 Unsqueeze_357" [id=330, type=Unsqueeze]; +"331 Concat_358" [id=331, type=Concat]; +"332 Reshape_359" [id=332, type=Reshape]; +"333 QuantizeLinear_backbone.loc.1.weight_1" [id=333, type=QuantizeLinear]; +"334 DequantizeLinear_backbone.loc.1.weight_1" [id=334, type=DequantizeLinear]; +"335 Conv_360" [id=335, type=Conv]; +"336 Constant_361" [id=336, type=Constant]; +"337 Shape_362" [id=337, type=Shape]; +"338 Gather_363" [id=338, type=Gather]; +"339 Constant_364" [id=339, type=Constant]; +"340 Constant_365" [id=340, type=Constant]; +"341 Constant_nncf_153" [id=341, type=Constant]; +"342 Unsqueeze_366" [id=342, type=Unsqueeze]; +"343 Constant_nncf_155" [id=343, type=Constant]; +"344 Unsqueeze_367" [id=344, type=Unsqueeze]; +"345 Constant_nncf_157" [id=345, type=Constant]; +"346 Unsqueeze_368" [id=346, type=Unsqueeze]; +"347 Concat_369" [id=347, type=Concat]; +"348 Reshape_370" [id=348, 
type=Reshape]; +"349 QuantizeLinear_backbone.conf.1.weight_1" [id=349, type=QuantizeLinear]; +"350 DequantizeLinear_backbone.conf.1.weight_1" [id=350, type=DequantizeLinear]; +"351 Conv_371" [id=351, type=Conv]; +"352 Constant_372" [id=352, type=Constant]; +"353 Shape_373" [id=353, type=Shape]; +"354 Gather_374" [id=354, type=Gather]; +"355 Constant_375" [id=355, type=Constant]; +"356 Constant_376" [id=356, type=Constant]; +"357 Constant_nncf_167" [id=357, type=Constant]; +"358 Unsqueeze_377" [id=358, type=Unsqueeze]; +"359 Constant_nncf_169" [id=359, type=Constant]; +"360 Unsqueeze_378" [id=360, type=Unsqueeze]; +"361 Constant_nncf_171" [id=361, type=Constant]; +"362 Unsqueeze_379" [id=362, type=Unsqueeze]; +"363 Concat_380" [id=363, type=Concat]; +"364 Reshape_381" [id=364, type=Reshape]; +"365 QuantizeLinear_backbone.loc.2.weight_1" [id=365, type=QuantizeLinear]; +"366 DequantizeLinear_backbone.loc.2.weight_1" [id=366, type=DequantizeLinear]; +"367 Conv_382" [id=367, type=Conv]; +"368 Constant_383" [id=368, type=Constant]; +"369 Shape_384" [id=369, type=Shape]; +"370 Gather_385" [id=370, type=Gather]; +"371 Constant_386" [id=371, type=Constant]; +"372 Constant_387" [id=372, type=Constant]; +"373 Constant_nncf_181" [id=373, type=Constant]; +"374 Unsqueeze_388" [id=374, type=Unsqueeze]; +"375 Constant_nncf_183" [id=375, type=Constant]; +"376 Unsqueeze_389" [id=376, type=Unsqueeze]; +"377 Constant_nncf_185" [id=377, type=Constant]; +"378 Unsqueeze_390" [id=378, type=Unsqueeze]; +"379 Concat_391" [id=379, type=Concat]; +"380 Reshape_392" [id=380, type=Reshape]; +"381 QuantizeLinear_backbone.conf.2.weight_1" [id=381, type=QuantizeLinear]; +"382 DequantizeLinear_backbone.conf.2.weight_1" [id=382, type=DequantizeLinear]; +"383 Conv_393" [id=383, type=Conv]; +"384 Constant_394" [id=384, type=Constant]; +"385 Shape_395" [id=385, type=Shape]; +"386 Gather_396" [id=386, type=Gather]; +"387 Constant_397" [id=387, type=Constant]; +"388 Constant_398" [id=388, type=Constant]; +"389 Constant_nncf_195" [id=389, type=Constant]; +"390 Unsqueeze_399" [id=390, type=Unsqueeze]; +"391 Constant_nncf_197" [id=391, type=Constant]; +"392 Unsqueeze_400" [id=392, type=Unsqueeze]; +"393 Constant_nncf_199" [id=393, type=Constant]; +"394 Unsqueeze_401" [id=394, type=Unsqueeze]; +"395 Concat_402" [id=395, type=Concat]; +"396 Reshape_403" [id=396, type=Reshape]; +"397 QuantizeLinear_backbone.loc.3.weight_1" [id=397, type=QuantizeLinear]; +"398 DequantizeLinear_backbone.loc.3.weight_1" [id=398, type=DequantizeLinear]; +"399 Conv_404" [id=399, type=Conv]; +"400 Constant_405" [id=400, type=Constant]; +"401 Shape_406" [id=401, type=Shape]; +"402 Gather_407" [id=402, type=Gather]; +"403 Constant_408" [id=403, type=Constant]; +"404 Constant_409" [id=404, type=Constant]; +"405 Constant_nncf_209" [id=405, type=Constant]; +"406 Unsqueeze_410" [id=406, type=Unsqueeze]; +"407 Constant_nncf_211" [id=407, type=Constant]; +"408 Unsqueeze_411" [id=408, type=Unsqueeze]; +"409 Constant_nncf_213" [id=409, type=Constant]; +"410 Unsqueeze_412" [id=410, type=Unsqueeze]; +"411 Concat_413" [id=411, type=Concat]; +"412 Reshape_414" [id=412, type=Reshape]; +"413 QuantizeLinear_backbone.conf.3.weight_1" [id=413, type=QuantizeLinear]; +"414 DequantizeLinear_backbone.conf.3.weight_1" [id=414, type=DequantizeLinear]; +"415 Conv_415" [id=415, type=Conv]; +"416 Constant_416" [id=416, type=Constant]; +"417 Shape_417" [id=417, type=Shape]; +"418 Gather_418" [id=418, type=Gather]; +"419 Constant_419" [id=419, type=Constant]; +"420 Constant_420" [id=420, 
type=Constant]; +"421 Constant_nncf_223" [id=421, type=Constant]; +"422 Unsqueeze_421" [id=422, type=Unsqueeze]; +"423 Constant_nncf_225" [id=423, type=Constant]; +"424 Unsqueeze_422" [id=424, type=Unsqueeze]; +"425 Constant_nncf_227" [id=425, type=Constant]; +"426 Unsqueeze_423" [id=426, type=Unsqueeze]; +"427 Concat_424" [id=427, type=Concat]; +"428 Reshape_425" [id=428, type=Reshape]; +"429 QuantizeLinear_backbone.loc.4.weight_1" [id=429, type=QuantizeLinear]; +"430 DequantizeLinear_backbone.loc.4.weight_1" [id=430, type=DequantizeLinear]; +"431 Conv_426" [id=431, type=Conv]; +"432 Constant_427" [id=432, type=Constant]; +"433 Shape_428" [id=433, type=Shape]; +"434 Gather_429" [id=434, type=Gather]; +"435 Constant_430" [id=435, type=Constant]; +"436 Constant_431" [id=436, type=Constant]; +"437 Constant_nncf_237" [id=437, type=Constant]; +"438 Unsqueeze_432" [id=438, type=Unsqueeze]; +"439 Constant_nncf_239" [id=439, type=Constant]; +"440 Unsqueeze_433" [id=440, type=Unsqueeze]; +"441 Constant_nncf_241" [id=441, type=Constant]; +"442 Unsqueeze_434" [id=442, type=Unsqueeze]; +"443 Concat_435" [id=443, type=Concat]; +"444 Reshape_436" [id=444, type=Reshape]; +"445 QuantizeLinear_backbone.conf.4.weight_1" [id=445, type=QuantizeLinear]; +"446 DequantizeLinear_backbone.conf.4.weight_1" [id=446, type=DequantizeLinear]; +"447 Conv_437" [id=447, type=Conv]; +"448 Constant_438" [id=448, type=Constant]; +"449 Shape_439" [id=449, type=Shape]; +"450 Gather_440" [id=450, type=Gather]; +"451 Constant_441" [id=451, type=Constant]; +"452 Constant_442" [id=452, type=Constant]; +"453 Constant_nncf_251" [id=453, type=Constant]; +"454 Unsqueeze_443" [id=454, type=Unsqueeze]; +"455 Constant_nncf_253" [id=455, type=Constant]; +"456 Unsqueeze_444" [id=456, type=Unsqueeze]; +"457 Constant_nncf_255" [id=457, type=Constant]; +"458 Unsqueeze_445" [id=458, type=Unsqueeze]; +"459 Concat_446" [id=459, type=Concat]; +"460 Reshape_447" [id=460, type=Reshape]; +"461 QuantizeLinear_Relu_337_1" [id=461, type=QuantizeLinear]; +"462 DequantizeLinear_Relu_337_1" [id=462, type=DequantizeLinear]; +"463 QuantizeLinear_backbone.loc.5.weight_1" [id=463, type=QuantizeLinear]; +"464 DequantizeLinear_backbone.loc.5.weight_1" [id=464, type=DequantizeLinear]; +"465 Conv_448" [id=465, type=Conv]; +"466 Constant_449" [id=466, type=Constant]; +"467 Shape_450" [id=467, type=Shape]; +"468 Gather_451" [id=468, type=Gather]; +"469 Constant_452" [id=469, type=Constant]; +"470 Constant_453" [id=470, type=Constant]; +"471 Constant_nncf_265" [id=471, type=Constant]; +"472 Unsqueeze_454" [id=472, type=Unsqueeze]; +"473 Constant_nncf_267" [id=473, type=Constant]; +"474 Unsqueeze_455" [id=474, type=Unsqueeze]; +"475 Constant_nncf_269" [id=475, type=Constant]; +"476 Unsqueeze_456" [id=476, type=Unsqueeze]; +"477 Concat_457" [id=477, type=Concat]; +"478 Reshape_458" [id=478, type=Reshape]; +"479 QuantizeLinear_backbone.conf.5.weight_1" [id=479, type=QuantizeLinear]; +"480 DequantizeLinear_backbone.conf.5.weight_1" [id=480, type=DequantizeLinear]; +"481 Conv_459" [id=481, type=Conv]; +"482 Constant_460" [id=482, type=Constant]; +"483 Shape_461" [id=483, type=Shape]; +"484 Gather_462" [id=484, type=Gather]; +"485 Constant_463" [id=485, type=Constant]; +"486 Constant_464" [id=486, type=Constant]; +"487 Constant_nncf_279" [id=487, type=Constant]; +"488 Unsqueeze_465" [id=488, type=Unsqueeze]; +"489 Constant_nncf_281" [id=489, type=Constant]; +"490 Unsqueeze_466" [id=490, type=Unsqueeze]; +"491 Constant_nncf_283" [id=491, type=Constant]; +"492 
Unsqueeze_467" [id=492, type=Unsqueeze]; +"493 Concat_468" [id=493, type=Concat]; +"494 Reshape_469" [id=494, type=Reshape]; +"495 Concat_470" [id=495, type=Concat]; +"496 Concat_471" [id=496, type=Concat]; +"497 Transpose_472" [id=497, type=Transpose]; +"498 Transpose_473" [id=498, type=Transpose]; +"499 Constant_474" [id=499, type=Constant]; +"500 Constant_475" [id=500, type=Constant]; +"501 Constant_476" [id=501, type=Constant]; +"502 Constant_477" [id=502, type=Constant]; +"503 Slice_478" [id=503, type=Slice]; +"504 Constant_479" [id=504, type=Constant]; +"505 Constant_480" [id=505, type=Constant]; +"506 Constant_481" [id=506, type=Constant]; +"507 Constant_482" [id=507, type=Constant]; +"508 Slice_483" [id=508, type=Slice]; +"509 Constant_484" [id=509, type=Constant]; +"510 Constant_485" [id=510, type=Constant]; +"511 Constant_486" [id=511, type=Constant]; +"512 Constant_487" [id=512, type=Constant]; +"513 Slice_488" [id=513, type=Slice]; +"514 Constant_489" [id=514, type=Constant]; +"515 Mul_490" [id=515, type=Mul]; +"516 Constant_491" [id=516, type=Constant]; +"517 Constant_492" [id=517, type=Constant]; +"518 Constant_493" [id=518, type=Constant]; +"519 Constant_494" [id=519, type=Constant]; +"520 Slice_495" [id=520, type=Slice]; +"521 Constant_496" [id=521, type=Constant]; +"522 Constant_497" [id=522, type=Constant]; +"523 Constant_498" [id=523, type=Constant]; +"524 Constant_499" [id=524, type=Constant]; +"525 Slice_500" [id=525, type=Slice]; +"526 Constant_501" [id=526, type=Constant]; +"527 Constant_502" [id=527, type=Constant]; +"528 Constant_503" [id=528, type=Constant]; +"529 Constant_504" [id=529, type=Constant]; +"530 Slice_505" [id=530, type=Slice]; +"531 Constant_506" [id=531, type=Constant]; +"532 Mul_507" [id=532, type=Mul]; +"533 Constant_508" [id=533, type=Constant]; +"534 Mul_509" [id=534, type=Mul]; +"535 Constant_510" [id=535, type=Constant]; +"536 Add_511" [id=536, type=Add]; +"537 Exp_512" [id=537, type=Exp]; +"538 Constant_513" [id=538, type=Constant]; +"539 Mul_514" [id=539, type=Mul]; +"540 Constant_515" [id=540, type=Constant]; +"541 Constant_516" [id=541, type=Constant]; +"542 Constant_517" [id=542, type=Constant]; +"543 Constant_518" [id=543, type=Constant]; +"544 Slice_519" [id=544, type=Slice]; +"545 Constant_520" [id=545, type=Constant]; +"546 Constant_521" [id=546, type=Constant]; +"547 Constant_522" [id=547, type=Constant]; +"548 Constant_523" [id=548, type=Constant]; +"549 Slice_524" [id=549, type=Slice]; +"550 Constant_525" [id=550, type=Constant]; +"551 Constant_526" [id=551, type=Constant]; +"552 Constant_527" [id=552, type=Constant]; +"553 Constant_528" [id=553, type=Constant]; +"554 Slice_529" [id=554, type=Slice]; +"555 Constant_nncf_347" [id=555, type=Constant]; +"556 Squeeze_530" [id=556, type=Squeeze]; +"557 Constant_531" [id=557, type=Constant]; +"558 Constant_532" [id=558, type=Constant]; +"559 Constant_533" [id=559, type=Constant]; +"560 Constant_534" [id=560, type=Constant]; +"561 Slice_535" [id=561, type=Slice]; +"562 Constant_536" [id=562, type=Constant]; +"563 Constant_537" [id=563, type=Constant]; +"564 Constant_538" [id=564, type=Constant]; +"565 Constant_539" [id=565, type=Constant]; +"566 Slice_540" [id=566, type=Slice]; +"567 Constant_541" [id=567, type=Constant]; +"568 Constant_542" [id=568, type=Constant]; +"569 Constant_543" [id=569, type=Constant]; +"570 Constant_544" [id=570, type=Constant]; +"571 Slice_545" [id=571, type=Slice]; +"572 Constant_nncf_364" [id=572, type=Constant]; +"573 Squeeze_546" [id=573, type=Squeeze]; 
+"574 Constant_547" [id=574, type=Constant]; +"575 Mul_548" [id=575, type=Mul]; +"576 Sub_549" [id=576, type=Sub]; +"577 Constant_550" [id=577, type=Constant]; +"578 Constant_551" [id=578, type=Constant]; +"579 Constant_552" [id=579, type=Constant]; +"580 Constant_553" [id=580, type=Constant]; +"581 Slice_554" [id=581, type=Slice]; +"582 Constant_555" [id=582, type=Constant]; +"583 Constant_556" [id=583, type=Constant]; +"584 Constant_557" [id=584, type=Constant]; +"585 Constant_558" [id=585, type=Constant]; +"586 Slice_559" [id=586, type=Slice]; +"587 Constant_560" [id=587, type=Constant]; +"588 Constant_561" [id=588, type=Constant]; +"589 Constant_562" [id=589, type=Constant]; +"590 Constant_563" [id=590, type=Constant]; +"591 Slice_564" [id=591, type=Slice]; +"592 Constant_nncf_384" [id=592, type=Constant]; +"593 Squeeze_565" [id=593, type=Squeeze]; +"594 Constant_566" [id=594, type=Constant]; +"595 Constant_567" [id=595, type=Constant]; +"596 Constant_568" [id=596, type=Constant]; +"597 Constant_569" [id=597, type=Constant]; +"598 Slice_570" [id=598, type=Slice]; +"599 Constant_571" [id=599, type=Constant]; +"600 Constant_572" [id=600, type=Constant]; +"601 Constant_573" [id=601, type=Constant]; +"602 Constant_574" [id=602, type=Constant]; +"603 Slice_575" [id=603, type=Slice]; +"604 Constant_576" [id=604, type=Constant]; +"605 Constant_577" [id=605, type=Constant]; +"606 Constant_578" [id=606, type=Constant]; +"607 Constant_579" [id=607, type=Constant]; +"608 Slice_580" [id=608, type=Slice]; +"609 Constant_nncf_401" [id=609, type=Constant]; +"610 Squeeze_581" [id=610, type=Squeeze]; +"611 Constant_582" [id=611, type=Constant]; +"612 Mul_583" [id=612, type=Mul]; +"613 Sub_584" [id=613, type=Sub]; +"614 Constant_585" [id=614, type=Constant]; +"615 Constant_586" [id=615, type=Constant]; +"616 Constant_587" [id=616, type=Constant]; +"617 Constant_588" [id=617, type=Constant]; +"618 Slice_589" [id=618, type=Slice]; +"619 Constant_590" [id=619, type=Constant]; +"620 Constant_591" [id=620, type=Constant]; +"621 Constant_592" [id=621, type=Constant]; +"622 Constant_593" [id=622, type=Constant]; +"623 Slice_594" [id=623, type=Slice]; +"624 Constant_595" [id=624, type=Constant]; +"625 Constant_596" [id=625, type=Constant]; +"626 Constant_597" [id=626, type=Constant]; +"627 Constant_598" [id=627, type=Constant]; +"628 Slice_599" [id=628, type=Slice]; +"629 Constant_nncf_421" [id=629, type=Constant]; +"630 Squeeze_600" [id=630, type=Squeeze]; +"631 Constant_601" [id=631, type=Constant]; +"632 Constant_602" [id=632, type=Constant]; +"633 Constant_603" [id=633, type=Constant]; +"634 Constant_604" [id=634, type=Constant]; +"635 Slice_605" [id=635, type=Slice]; +"636 Constant_606" [id=636, type=Constant]; +"637 Constant_607" [id=637, type=Constant]; +"638 Constant_608" [id=638, type=Constant]; +"639 Constant_609" [id=639, type=Constant]; +"640 Slice_610" [id=640, type=Slice]; +"641 Constant_611" [id=641, type=Constant]; +"642 Constant_612" [id=642, type=Constant]; +"643 Constant_613" [id=643, type=Constant]; +"644 Constant_614" [id=644, type=Constant]; +"645 Slice_615" [id=645, type=Slice]; +"646 Constant_nncf_438" [id=646, type=Constant]; +"647 Squeeze_616" [id=647, type=Squeeze]; +"648 Constant_617" [id=648, type=Constant]; +"649 Mul_618" [id=649, type=Mul]; +"650 Add_619" [id=650, type=Add]; +"651 Constant_620" [id=651, type=Constant]; +"652 Constant_621" [id=652, type=Constant]; +"653 Constant_622" [id=653, type=Constant]; +"654 Constant_623" [id=654, type=Constant]; +"655 Slice_624" [id=655, 
type=Slice]; +"656 Constant_625" [id=656, type=Constant]; +"657 Constant_626" [id=657, type=Constant]; +"658 Constant_627" [id=658, type=Constant]; +"659 Constant_628" [id=659, type=Constant]; +"660 Slice_629" [id=660, type=Slice]; +"661 Constant_630" [id=661, type=Constant]; +"662 Constant_631" [id=662, type=Constant]; +"663 Constant_632" [id=663, type=Constant]; +"664 Constant_633" [id=664, type=Constant]; +"665 Slice_634" [id=665, type=Slice]; +"666 Constant_nncf_458" [id=666, type=Constant]; +"667 Squeeze_635" [id=667, type=Squeeze]; +"668 Constant_636" [id=668, type=Constant]; +"669 Constant_637" [id=669, type=Constant]; +"670 Constant_638" [id=670, type=Constant]; +"671 Constant_639" [id=671, type=Constant]; +"672 Slice_640" [id=672, type=Slice]; +"673 Constant_641" [id=673, type=Constant]; +"674 Constant_642" [id=674, type=Constant]; +"675 Constant_643" [id=675, type=Constant]; +"676 Constant_644" [id=676, type=Constant]; +"677 Slice_645" [id=677, type=Slice]; +"678 Constant_646" [id=678, type=Constant]; +"679 Constant_647" [id=679, type=Constant]; +"680 Constant_648" [id=680, type=Constant]; +"681 Constant_649" [id=681, type=Constant]; +"682 Slice_650" [id=682, type=Slice]; +"683 Constant_nncf_475" [id=683, type=Constant]; +"684 Squeeze_651" [id=684, type=Squeeze]; +"685 Constant_652" [id=685, type=Constant]; +"686 Mul_653" [id=686, type=Mul]; +"687 Add_654" [id=687, type=Add]; +"688 Constant_nncf_480" [id=688, type=Constant]; +"689 Unsqueeze_655" [id=689, type=Unsqueeze]; +"690 Constant_nncf_482" [id=690, type=Constant]; +"691 Unsqueeze_656" [id=691, type=Unsqueeze]; +"692 Constant_nncf_484" [id=692, type=Constant]; +"693 Unsqueeze_657" [id=693, type=Unsqueeze]; +"694 Constant_nncf_486" [id=694, type=Constant]; +"695 Unsqueeze_658" [id=695, type=Unsqueeze]; +"696 Concat_659" [id=696, type=Concat]; +"697 Shape_nncf_489" [id=697, type=Shape]; +"698 Flatten_nncf_490" [id=698, type=Flatten]; +"699 Softmax_660" [id=699, type=Softmax]; +"700 Reshape_nncf_492" [id=700, type=Reshape]; +"701 Transpose_661" [id=701, type=Transpose]; +"702 Constant_662" [id=702, type=Constant]; +"703 Constant_663" [id=703, type=Constant]; +"704 Constant_664" [id=704, type=Constant]; +"705 Constant_665" [id=705, type=Constant]; +"706 Slice_666" [id=706, type=Slice]; +"707 Constant_667" [id=707, type=Constant]; +"708 Constant_668" [id=708, type=Constant]; +"709 Constant_669" [id=709, type=Constant]; +"710 Constant_670" [id=710, type=Constant]; +"711 Slice_671" [id=711, type=Slice]; +"712 Constant_672" [id=712, type=Constant]; +"713 Constant_673" [id=713, type=Constant]; +"714 Constant_674" [id=714, type=Constant]; +"715 Constant_675" [id=715, type=Constant]; +"716 Slice_676" [id=716, type=Slice]; +"717 Constant_677" [id=717, type=Constant]; +"718 ConstantOfShape_678" [id=718, type=ConstantOfShape]; +"719 Constant_679" [id=719, type=Constant]; +"720 ConstantOfShape_680" [id=720, type=ConstantOfShape]; +"721 Constant_681" [id=721, type=Constant]; +"722 ConstantOfShape_682" [id=722, type=ConstantOfShape]; +"723 NonMaxSuppression_683" [id=723, type=NonMaxSuppression]; +"724 Constant_684" [id=724, type=Constant]; +"725 Constant_685" [id=725, type=Constant]; +"726 Constant_686" [id=726, type=Constant]; +"727 Constant_687" [id=727, type=Constant]; +"728 Slice_688" [id=728, type=Slice]; +"729 Constant_689" [id=729, type=Constant]; +"730 Gather_690" [id=730, type=Gather]; +"731 Constant_691" [id=731, type=Constant]; +"732 Constant_692" [id=732, type=Constant]; +"733 Constant_693" [id=733, type=Constant]; +"734 
Constant_694" [id=734, type=Constant]; +"735 Slice_695" [id=735, type=Slice]; +"736 Constant_696" [id=736, type=Constant]; +"737 Gather_697" [id=737, type=Gather]; +"738 Constant_698" [id=738, type=Constant]; +"739 Reshape_699" [id=739, type=Reshape]; +"740 Shape_700" [id=740, type=Shape]; +"741 Constant_701" [id=741, type=Constant]; +"742 Gather_702" [id=742, type=Gather]; +"743 Mul_703" [id=743, type=Mul]; +"744 Add_704" [id=744, type=Add]; +"745 Cast_705" [id=745, type=Cast]; +"746 Gather_706" [id=746, type=Gather]; +"747 Shape_707" [id=747, type=Shape]; +"748 Constant_708" [id=748, type=Constant]; +"749 Gather_709" [id=749, type=Gather]; +"750 Constant_nncf_542" [id=750, type=Constant]; +"751 Unsqueeze_710" [id=751, type=Unsqueeze]; +"752 Constant_711" [id=752, type=Constant]; +"753 Concat_712" [id=753, type=Concat]; +"754 Cast_713" [id=754, type=Cast]; +"755 ReduceMin_714" [id=755, type=ReduceMin]; +"756 Cast_715" [id=756, type=Cast]; +"757 Constant_nncf_549" [id=757, type=Constant]; +"758 Unsqueeze_716" [id=758, type=Unsqueeze]; +"759 TopK_717" [id=759, type=TopK]; +"760 Constant_nncf_552" [id=760, type=Constant]; +"761 Squeeze_719" [id=761, type=Squeeze]; +"762 Gather_720" [id=762, type=Gather]; +"763 Constant_721" [id=763, type=Constant]; +"764 Constant_722" [id=764, type=Constant]; +"765 Constant_723" [id=765, type=Constant]; +"766 Constant_724" [id=766, type=Constant]; +"767 Slice_725" [id=767, type=Slice]; +"768 Cast_726" [id=768, type=Cast]; +"769 Gather_727" [id=769, type=Gather]; +"770 Constant_nncf_562" [id=770, type=Constant]; +"771 Unsqueeze_bboxes" [id=771, type=Unsqueeze]; +"772 Gather_729" [id=772, type=Gather]; +"773 Constant_nncf_565" [id=773, type=Constant]; +"774 Unsqueeze_730" [id=774, type=Unsqueeze]; +"775 Constant_731" [id=775, type=Constant]; +"776 Add_labels" [id=776, type=Add]; +"777 Gather_733" [id=777, type=Gather]; +"778 Constant_nncf_570" [id=778, type=Constant]; +"779 Unsqueeze_scores" [id=779, type=Unsqueeze]; +"780 nncf_model_input_0" [id=780, type=nncf_model_input]; +"781 nncf_model_output_0" [id=781, type=nncf_model_output]; +"782 nncf_model_output_1" [id=782, type=nncf_model_output]; +"783 nncf_model_output_2" [id=783, type=nncf_model_output]; "0 QuantizeLinear_image_1" -> "1 DequantizeLinear_image_1" [label="[1, 3, 1200, 1200]", style=dashed]; "1 DequantizeLinear_image_1" -> "4 Conv_219" [label="[1, 3, 1200, 1200]", style=solid]; "2 QuantizeLinear_backbone.model.layer1.0.weight_1" -> "3 DequantizeLinear_backbone.model.layer1.0.weight_1" [label="[64, 3, 7, 7]", style=dashed]; @@ -984,8 +1041,8 @@ strict digraph { "242 DequantizeLinear_Relu_317_1" -> "245 Conv_318" [label="[1, 256, 150, 150]", style=solid]; "242 DequantizeLinear_Relu_317_1" -> "303 Conv_338" [label="[1, 256, 150, 150]", style=solid]; "242 DequantizeLinear_Relu_317_1" -> "305 Shape_340" [label="[1, 256, 150, 150]", style=solid]; -"242 DequantizeLinear_Relu_317_1" -> "316 Conv_349" [label="[1, 256, 150, 150]", style=solid]; -"242 DequantizeLinear_Relu_317_1" -> "318 Shape_351" [label="[1, 256, 150, 150]", style=solid]; +"242 DequantizeLinear_Relu_317_1" -> "319 Conv_349" [label="[1, 256, 150, 150]", style=solid]; +"242 DequantizeLinear_Relu_317_1" -> "321 Shape_351" [label="[1, 256, 150, 150]", style=solid]; "243 QuantizeLinear_backbone.additional_blocks.0.0.weight_1" -> "244 DequantizeLinear_backbone.additional_blocks.0.0.weight_1" [label="[256, 256, 1, 1]", style=dashed]; "244 DequantizeLinear_backbone.additional_blocks.0.0.weight_1" -> "245 Conv_318" [label="[256, 256, 1, 1]", 
style=solid]; "245 Conv_318" -> "246 Relu_319" [label="[1, 256, 150, 150]", style=solid]; @@ -998,10 +1055,10 @@ strict digraph { "252 Relu_321" -> "253 QuantizeLinear_Relu_321_1" [label="[1, 512, 75, 75]", style=solid]; "253 QuantizeLinear_Relu_321_1" -> "254 DequantizeLinear_Relu_321_1" [label="[1, 512, 75, 75]", style=dashed]; "254 DequantizeLinear_Relu_321_1" -> "257 Conv_322" [label="[1, 512, 75, 75]", style=solid]; -"254 DequantizeLinear_Relu_321_1" -> "329 Conv_360" [label="[1, 512, 75, 75]", style=solid]; -"254 DequantizeLinear_Relu_321_1" -> "331 Shape_362" [label="[1, 512, 75, 75]", style=solid]; -"254 DequantizeLinear_Relu_321_1" -> "342 Conv_371" [label="[1, 512, 75, 75]", style=solid]; -"254 DequantizeLinear_Relu_321_1" -> "344 Shape_373" [label="[1, 512, 75, 75]", style=solid]; +"254 DequantizeLinear_Relu_321_1" -> "335 Conv_360" [label="[1, 512, 75, 75]", style=solid]; +"254 DequantizeLinear_Relu_321_1" -> "337 Shape_362" [label="[1, 512, 75, 75]", style=solid]; +"254 DequantizeLinear_Relu_321_1" -> "351 Conv_371" [label="[1, 512, 75, 75]", style=solid]; +"254 DequantizeLinear_Relu_321_1" -> "353 Shape_373" [label="[1, 512, 75, 75]", style=solid]; "255 QuantizeLinear_backbone.additional_blocks.1.0.weight_1" -> "256 DequantizeLinear_backbone.additional_blocks.1.0.weight_1" [label="[256, 512, 1, 1]", style=dashed]; "256 DequantizeLinear_backbone.additional_blocks.1.0.weight_1" -> "257 Conv_322" [label="[256, 512, 1, 1]", style=solid]; "257 Conv_322" -> "258 Relu_323" [label="[1, 256, 75, 75]", style=solid]; @@ -1014,10 +1071,10 @@ strict digraph { "264 Relu_325" -> "265 QuantizeLinear_Relu_325_1" [label="[1, 512, 38, 38]", style=solid]; "265 QuantizeLinear_Relu_325_1" -> "266 DequantizeLinear_Relu_325_1" [label="[1, 512, 38, 38]", style=dashed]; "266 DequantizeLinear_Relu_325_1" -> "269 Conv_326" [label="[1, 512, 38, 38]", style=solid]; -"266 DequantizeLinear_Relu_325_1" -> "355 Conv_382" [label="[1, 512, 38, 38]", style=solid]; -"266 DequantizeLinear_Relu_325_1" -> "357 Shape_384" [label="[1, 512, 38, 38]", style=solid]; -"266 DequantizeLinear_Relu_325_1" -> "368 Conv_393" [label="[1, 512, 38, 38]", style=solid]; -"266 DequantizeLinear_Relu_325_1" -> "370 Shape_395" [label="[1, 512, 38, 38]", style=solid]; +"266 DequantizeLinear_Relu_325_1" -> "367 Conv_382" [label="[1, 512, 38, 38]", style=solid]; +"266 DequantizeLinear_Relu_325_1" -> "369 Shape_384" [label="[1, 512, 38, 38]", style=solid]; +"266 DequantizeLinear_Relu_325_1" -> "383 Conv_393" [label="[1, 512, 38, 38]", style=solid]; +"266 DequantizeLinear_Relu_325_1" -> "385 Shape_395" [label="[1, 512, 38, 38]", style=solid]; "267 QuantizeLinear_backbone.additional_blocks.2.0.weight_1" -> "268 DequantizeLinear_backbone.additional_blocks.2.0.weight_1" [label="[128, 512, 1, 1]", style=dashed]; "268 DequantizeLinear_backbone.additional_blocks.2.0.weight_1" -> "269 Conv_326" [label="[128, 512, 1, 1]", style=solid]; "269 Conv_326" -> "270 Relu_327" [label="[1, 128, 38, 38]", style=solid]; @@ -1030,10 +1087,10 @@ strict digraph { "276 Relu_329" -> "277 QuantizeLinear_Relu_329_1" [label="[1, 256, 19, 19]", style=solid]; "277 QuantizeLinear_Relu_329_1" -> "278 DequantizeLinear_Relu_329_1" [label="[1, 256, 19, 19]", style=dashed]; "278 DequantizeLinear_Relu_329_1" -> "281 Conv_330" [label="[1, 256, 19, 19]", style=solid]; -"278 DequantizeLinear_Relu_329_1" -> "381 Conv_404" [label="[1, 256, 19, 19]", style=solid]; -"278 DequantizeLinear_Relu_329_1" -> "383 Shape_406" [label="[1, 256, 19, 19]", style=solid]; -"278 
DequantizeLinear_Relu_329_1" -> "394 Conv_415" [label="[1, 256, 19, 19]", style=solid]; -"278 DequantizeLinear_Relu_329_1" -> "396 Shape_417" [label="[1, 256, 19, 19]", style=solid]; +"278 DequantizeLinear_Relu_329_1" -> "399 Conv_404" [label="[1, 256, 19, 19]", style=solid]; +"278 DequantizeLinear_Relu_329_1" -> "401 Shape_406" [label="[1, 256, 19, 19]", style=solid]; +"278 DequantizeLinear_Relu_329_1" -> "415 Conv_415" [label="[1, 256, 19, 19]", style=solid]; +"278 DequantizeLinear_Relu_329_1" -> "417 Shape_417" [label="[1, 256, 19, 19]", style=solid]; "279 QuantizeLinear_backbone.additional_blocks.3.0.weight_1" -> "280 DequantizeLinear_backbone.additional_blocks.3.0.weight_1" [label="[128, 256, 1, 1]", style=dashed]; "280 DequantizeLinear_backbone.additional_blocks.3.0.weight_1" -> "281 Conv_330" [label="[128, 256, 1, 1]", style=solid]; "281 Conv_330" -> "282 Relu_331" [label="[1, 128, 19, 19]", style=solid]; @@ -1046,10 +1103,10 @@ strict digraph { "288 Relu_333" -> "289 QuantizeLinear_Relu_333_1" [label="[1, 256, 9, 9]", style=solid]; "289 QuantizeLinear_Relu_333_1" -> "290 DequantizeLinear_Relu_333_1" [label="[1, 256, 9, 9]", style=dashed]; "290 DequantizeLinear_Relu_333_1" -> "293 Conv_334" [label="[1, 256, 9, 9]", style=solid]; -"290 DequantizeLinear_Relu_333_1" -> "407 Conv_426" [label="[1, 256, 9, 9]", style=solid]; -"290 DequantizeLinear_Relu_333_1" -> "409 Shape_428" [label="[1, 256, 9, 9]", style=solid]; -"290 DequantizeLinear_Relu_333_1" -> "420 Conv_437" [label="[1, 256, 9, 9]", style=solid]; -"290 DequantizeLinear_Relu_333_1" -> "422 Shape_439" [label="[1, 256, 9, 9]", style=solid]; +"290 DequantizeLinear_Relu_333_1" -> "431 Conv_426" [label="[1, 256, 9, 9]", style=solid]; +"290 DequantizeLinear_Relu_333_1" -> "433 Shape_428" [label="[1, 256, 9, 9]", style=solid]; +"290 DequantizeLinear_Relu_333_1" -> "447 Conv_437" [label="[1, 256, 9, 9]", style=solid]; +"290 DequantizeLinear_Relu_333_1" -> "449 Shape_439" [label="[1, 256, 9, 9]", style=solid]; "291 QuantizeLinear_backbone.additional_blocks.4.0.weight_1" -> "292 DequantizeLinear_backbone.additional_blocks.4.0.weight_1" [label="[128, 256, 1, 1]", style=dashed]; "292 DequantizeLinear_backbone.additional_blocks.4.0.weight_1" -> "293 Conv_334" [label="[128, 256, 1, 1]", style=solid]; "293 Conv_334" -> "294 Relu_335" [label="[1, 128, 9, 9]", style=solid]; @@ -1059,448 +1116,506 @@ strict digraph { "297 QuantizeLinear_backbone.additional_blocks.4.2.weight_1" -> "298 DequantizeLinear_backbone.additional_blocks.4.2.weight_1" [label="[256, 128, 3, 3]", style=dashed]; "298 DequantizeLinear_backbone.additional_blocks.4.2.weight_1" -> "299 Conv_336" [label="[256, 128, 3, 3]", style=solid]; "299 Conv_336" -> "300 Relu_337" [label="[1, 256, 7, 7]", style=solid]; -"300 Relu_337" -> "431 QuantizeLinear_Relu_337_1" [label="[1, 256, 7, 7]", style=solid]; +"300 Relu_337" -> "461 QuantizeLinear_Relu_337_1" [label="[1, 256, 7, 7]", style=solid]; "301 QuantizeLinear_backbone.loc.0.weight_1" -> "302 DequantizeLinear_backbone.loc.0.weight_1" [label="[16, 256, 3, 3]", style=dashed]; "302 DequantizeLinear_backbone.loc.0.weight_1" -> "303 Conv_338" [label="[16, 256, 3, 3]", style=solid]; -"303 Conv_338" -> "313 Reshape_348" [label="[1, 16, 50, 50]", style=solid]; +"303 Conv_338" -> "316 Reshape_348" [label="[1, 16, 50, 50]", style=solid]; "304 Constant_339" -> "306 Gather_341" [label="[]", style=dashed]; "305 Shape_340" -> "306 Gather_341" [label="[4]", style=dashed]; -"306 Gather_341" -> "309 Unsqueeze_344" [label="[]", style=dashed]; -"307 
Constant_342" -> "310 Unsqueeze_345" [label="[]", style=dashed]; -"308 Constant_343" -> "311 Unsqueeze_346" [label="[]", style=dashed]; -"309 Unsqueeze_344" -> "312 Concat_347" [label="[1]", style=dashed]; -"310 Unsqueeze_345" -> "312 Concat_347" [label="[1]", style=dashed]; -"311 Unsqueeze_346" -> "312 Concat_347" [label="[1]", style=dashed]; -"312 Concat_347" -> "313 Reshape_348" [label="[3]", style=dashed]; -"313 Reshape_348" -> "459 Concat_470" [label="[]", style=solid]; -"314 QuantizeLinear_backbone.conf.0.weight_1" -> "315 DequantizeLinear_backbone.conf.0.weight_1" [label="[324, 256, 3, 3]", style=dashed]; -"315 DequantizeLinear_backbone.conf.0.weight_1" -> "316 Conv_349" [label="[324, 256, 3, 3]", style=solid]; -"316 Conv_349" -> "326 Reshape_359" [label="[1, 324, 50, 50]", style=solid]; -"317 Constant_350" -> "319 Gather_352" [label="[]", style=dashed]; -"318 Shape_351" -> "319 Gather_352" [label="[4]", style=dashed]; -"319 Gather_352" -> "322 Unsqueeze_355" [label="[]", style=dashed]; -"320 Constant_353" -> "323 Unsqueeze_356" [label="[]", style=dashed]; -"321 Constant_354" -> "324 Unsqueeze_357" [label="[]", style=dashed]; -"322 Unsqueeze_355" -> "325 Concat_358" [label="[1]", style=dashed]; -"323 Unsqueeze_356" -> "325 Concat_358" [label="[1]", style=dashed]; -"324 Unsqueeze_357" -> "325 Concat_358" [label="[1]", style=dashed]; -"325 Concat_358" -> "326 Reshape_359" [label="[3]", style=dashed]; -"326 Reshape_359" -> "460 Concat_471" [label="[]", style=solid]; -"327 QuantizeLinear_backbone.loc.1.weight_1" -> "328 DequantizeLinear_backbone.loc.1.weight_1" [label="[24, 512, 3, 3]", style=dashed]; -"328 DequantizeLinear_backbone.loc.1.weight_1" -> "329 Conv_360" [label="[24, 512, 3, 3]", style=solid]; -"329 Conv_360" -> "339 Reshape_370" [label="[1, 24, 25, 25]", style=solid]; -"330 Constant_361" -> "332 Gather_363" [label="[]", style=dashed]; -"331 Shape_362" -> "332 Gather_363" [label="[4]", style=dashed]; -"332 Gather_363" -> "335 Unsqueeze_366" [label="[]", style=dashed]; -"333 Constant_364" -> "336 Unsqueeze_367" [label="[]", style=dashed]; -"334 Constant_365" -> "337 Unsqueeze_368" [label="[]", style=dashed]; -"335 Unsqueeze_366" -> "338 Concat_369" [label="[1]", style=dashed]; -"336 Unsqueeze_367" -> "338 Concat_369" [label="[1]", style=dashed]; -"337 Unsqueeze_368" -> "338 Concat_369" [label="[1]", style=dashed]; -"338 Concat_369" -> "339 Reshape_370" [label="[3]", style=dashed]; -"339 Reshape_370" -> "459 Concat_470" [label="[]", style=solid]; -"340 QuantizeLinear_backbone.conf.1.weight_1" -> "341 DequantizeLinear_backbone.conf.1.weight_1" [label="[486, 512, 3, 3]", style=dashed]; -"341 DequantizeLinear_backbone.conf.1.weight_1" -> "342 Conv_371" [label="[486, 512, 3, 3]", style=solid]; -"342 Conv_371" -> "352 Reshape_381" [label="[1, 486, 25, 25]", style=solid]; -"343 Constant_372" -> "345 Gather_374" [label="[]", style=dashed]; -"344 Shape_373" -> "345 Gather_374" [label="[4]", style=dashed]; -"345 Gather_374" -> "348 Unsqueeze_377" [label="[]", style=dashed]; -"346 Constant_375" -> "349 Unsqueeze_378" [label="[]", style=dashed]; -"347 Constant_376" -> "350 Unsqueeze_379" [label="[]", style=dashed]; -"348 Unsqueeze_377" -> "351 Concat_380" [label="[1]", style=dashed]; -"349 Unsqueeze_378" -> "351 Concat_380" [label="[1]", style=dashed]; -"350 Unsqueeze_379" -> "351 Concat_380" [label="[1]", style=dashed]; -"351 Concat_380" -> "352 Reshape_381" [label="[3]", style=dashed]; -"352 Reshape_381" -> "460 Concat_471" [label="[]", style=solid]; -"353 
QuantizeLinear_backbone.loc.2.weight_1" -> "354 DequantizeLinear_backbone.loc.2.weight_1" [label="[24, 512, 3, 3]", style=dashed]; -"354 DequantizeLinear_backbone.loc.2.weight_1" -> "355 Conv_382" [label="[24, 512, 3, 3]", style=solid]; -"355 Conv_382" -> "365 Reshape_392" [label="[1, 24, 13, 13]", style=solid]; -"356 Constant_383" -> "358 Gather_385" [label="[]", style=dashed]; -"357 Shape_384" -> "358 Gather_385" [label="[4]", style=dashed]; -"358 Gather_385" -> "361 Unsqueeze_388" [label="[]", style=dashed]; -"359 Constant_386" -> "362 Unsqueeze_389" [label="[]", style=dashed]; -"360 Constant_387" -> "363 Unsqueeze_390" [label="[]", style=dashed]; -"361 Unsqueeze_388" -> "364 Concat_391" [label="[1]", style=dashed]; -"362 Unsqueeze_389" -> "364 Concat_391" [label="[1]", style=dashed]; -"363 Unsqueeze_390" -> "364 Concat_391" [label="[1]", style=dashed]; -"364 Concat_391" -> "365 Reshape_392" [label="[3]", style=dashed]; -"365 Reshape_392" -> "459 Concat_470" [label="[]", style=solid]; -"366 QuantizeLinear_backbone.conf.2.weight_1" -> "367 DequantizeLinear_backbone.conf.2.weight_1" [label="[486, 512, 3, 3]", style=dashed]; -"367 DequantizeLinear_backbone.conf.2.weight_1" -> "368 Conv_393" [label="[486, 512, 3, 3]", style=solid]; -"368 Conv_393" -> "378 Reshape_403" [label="[1, 486, 13, 13]", style=solid]; -"369 Constant_394" -> "371 Gather_396" [label="[]", style=dashed]; -"370 Shape_395" -> "371 Gather_396" [label="[4]", style=dashed]; -"371 Gather_396" -> "374 Unsqueeze_399" [label="[]", style=dashed]; -"372 Constant_397" -> "375 Unsqueeze_400" [label="[]", style=dashed]; -"373 Constant_398" -> "376 Unsqueeze_401" [label="[]", style=dashed]; -"374 Unsqueeze_399" -> "377 Concat_402" [label="[1]", style=dashed]; -"375 Unsqueeze_400" -> "377 Concat_402" [label="[1]", style=dashed]; -"376 Unsqueeze_401" -> "377 Concat_402" [label="[1]", style=dashed]; -"377 Concat_402" -> "378 Reshape_403" [label="[3]", style=dashed]; -"378 Reshape_403" -> "460 Concat_471" [label="[]", style=solid]; -"379 QuantizeLinear_backbone.loc.3.weight_1" -> "380 DequantizeLinear_backbone.loc.3.weight_1" [label="[24, 256, 3, 3]", style=dashed]; -"380 DequantizeLinear_backbone.loc.3.weight_1" -> "381 Conv_404" [label="[24, 256, 3, 3]", style=solid]; -"381 Conv_404" -> "391 Reshape_414" [label="[1, 24, 7, 7]", style=solid]; -"382 Constant_405" -> "384 Gather_407" [label="[]", style=dashed]; -"383 Shape_406" -> "384 Gather_407" [label="[4]", style=dashed]; -"384 Gather_407" -> "387 Unsqueeze_410" [label="[]", style=dashed]; -"385 Constant_408" -> "388 Unsqueeze_411" [label="[]", style=dashed]; -"386 Constant_409" -> "389 Unsqueeze_412" [label="[]", style=dashed]; -"387 Unsqueeze_410" -> "390 Concat_413" [label="[1]", style=dashed]; -"388 Unsqueeze_411" -> "390 Concat_413" [label="[1]", style=dashed]; -"389 Unsqueeze_412" -> "390 Concat_413" [label="[1]", style=dashed]; -"390 Concat_413" -> "391 Reshape_414" [label="[3]", style=dashed]; -"391 Reshape_414" -> "459 Concat_470" [label="[]", style=solid]; -"392 QuantizeLinear_backbone.conf.3.weight_1" -> "393 DequantizeLinear_backbone.conf.3.weight_1" [label="[486, 256, 3, 3]", style=dashed]; -"393 DequantizeLinear_backbone.conf.3.weight_1" -> "394 Conv_415" [label="[486, 256, 3, 3]", style=solid]; -"394 Conv_415" -> "404 Reshape_425" [label="[1, 486, 7, 7]", style=solid]; -"395 Constant_416" -> "397 Gather_418" [label="[]", style=dashed]; -"396 Shape_417" -> "397 Gather_418" [label="[4]", style=dashed]; -"397 Gather_418" -> "400 Unsqueeze_421" [label="[]", style=dashed]; 
-"398 Constant_419" -> "401 Unsqueeze_422" [label="[]", style=dashed]; -"399 Constant_420" -> "402 Unsqueeze_423" [label="[]", style=dashed]; -"400 Unsqueeze_421" -> "403 Concat_424" [label="[1]", style=dashed]; -"401 Unsqueeze_422" -> "403 Concat_424" [label="[1]", style=dashed]; -"402 Unsqueeze_423" -> "403 Concat_424" [label="[1]", style=dashed]; -"403 Concat_424" -> "404 Reshape_425" [label="[3]", style=dashed]; -"404 Reshape_425" -> "460 Concat_471" [label="[]", style=solid]; -"405 QuantizeLinear_backbone.loc.4.weight_1" -> "406 DequantizeLinear_backbone.loc.4.weight_1" [label="[16, 256, 3, 3]", style=dashed]; -"406 DequantizeLinear_backbone.loc.4.weight_1" -> "407 Conv_426" [label="[16, 256, 3, 3]", style=solid]; -"407 Conv_426" -> "417 Reshape_436" [label="[1, 16, 3, 3]", style=solid]; -"408 Constant_427" -> "410 Gather_429" [label="[]", style=dashed]; -"409 Shape_428" -> "410 Gather_429" [label="[4]", style=dashed]; -"410 Gather_429" -> "413 Unsqueeze_432" [label="[]", style=dashed]; -"411 Constant_430" -> "414 Unsqueeze_433" [label="[]", style=dashed]; -"412 Constant_431" -> "415 Unsqueeze_434" [label="[]", style=dashed]; -"413 Unsqueeze_432" -> "416 Concat_435" [label="[1]", style=dashed]; -"414 Unsqueeze_433" -> "416 Concat_435" [label="[1]", style=dashed]; -"415 Unsqueeze_434" -> "416 Concat_435" [label="[1]", style=dashed]; -"416 Concat_435" -> "417 Reshape_436" [label="[3]", style=dashed]; -"417 Reshape_436" -> "459 Concat_470" [label="[]", style=solid]; -"418 QuantizeLinear_backbone.conf.4.weight_1" -> "419 DequantizeLinear_backbone.conf.4.weight_1" [label="[324, 256, 3, 3]", style=dashed]; -"419 DequantizeLinear_backbone.conf.4.weight_1" -> "420 Conv_437" [label="[324, 256, 3, 3]", style=solid]; -"420 Conv_437" -> "430 Reshape_447" [label="[1, 324, 3, 3]", style=solid]; -"421 Constant_438" -> "423 Gather_440" [label="[]", style=dashed]; -"422 Shape_439" -> "423 Gather_440" [label="[4]", style=dashed]; -"423 Gather_440" -> "426 Unsqueeze_443" [label="[]", style=dashed]; -"424 Constant_441" -> "427 Unsqueeze_444" [label="[]", style=dashed]; -"425 Constant_442" -> "428 Unsqueeze_445" [label="[]", style=dashed]; -"426 Unsqueeze_443" -> "429 Concat_446" [label="[1]", style=dashed]; -"427 Unsqueeze_444" -> "429 Concat_446" [label="[1]", style=dashed]; -"428 Unsqueeze_445" -> "429 Concat_446" [label="[1]", style=dashed]; -"429 Concat_446" -> "430 Reshape_447" [label="[3]", style=dashed]; -"430 Reshape_447" -> "460 Concat_471" [label="[]", style=solid]; -"431 QuantizeLinear_Relu_337_1" -> "432 DequantizeLinear_Relu_337_1" [label="[1, 256, 7, 7]", style=dashed]; -"432 DequantizeLinear_Relu_337_1" -> "435 Conv_448" [label="[1, 256, 7, 7]", style=solid]; -"432 DequantizeLinear_Relu_337_1" -> "437 Shape_450" [label="[1, 256, 7, 7]", style=solid]; -"432 DequantizeLinear_Relu_337_1" -> "448 Conv_459" [label="[1, 256, 7, 7]", style=solid]; -"432 DequantizeLinear_Relu_337_1" -> "450 Shape_461" [label="[1, 256, 7, 7]", style=solid]; -"433 QuantizeLinear_backbone.loc.5.weight_1" -> "434 DequantizeLinear_backbone.loc.5.weight_1" [label="[16, 256, 3, 3]", style=dashed]; -"434 DequantizeLinear_backbone.loc.5.weight_1" -> "435 Conv_448" [label="[16, 256, 3, 3]", style=solid]; -"435 Conv_448" -> "445 Reshape_458" [label="[1, 16, 3, 3]", style=solid]; -"436 Constant_449" -> "438 Gather_451" [label="[]", style=dashed]; -"437 Shape_450" -> "438 Gather_451" [label="[4]", style=dashed]; -"438 Gather_451" -> "441 Unsqueeze_454" [label="[]", style=dashed]; -"439 Constant_452" -> "442 Unsqueeze_455" 
[label="[]", style=dashed]; -"440 Constant_453" -> "443 Unsqueeze_456" [label="[]", style=dashed]; -"441 Unsqueeze_454" -> "444 Concat_457" [label="[1]", style=dashed]; -"442 Unsqueeze_455" -> "444 Concat_457" [label="[1]", style=dashed]; -"443 Unsqueeze_456" -> "444 Concat_457" [label="[1]", style=dashed]; -"444 Concat_457" -> "445 Reshape_458" [label="[3]", style=dashed]; -"445 Reshape_458" -> "459 Concat_470" [label="[]", style=solid]; -"446 QuantizeLinear_backbone.conf.5.weight_1" -> "447 DequantizeLinear_backbone.conf.5.weight_1" [label="[324, 256, 3, 3]", style=dashed]; -"447 DequantizeLinear_backbone.conf.5.weight_1" -> "448 Conv_459" [label="[324, 256, 3, 3]", style=solid]; -"448 Conv_459" -> "458 Reshape_469" [label="[1, 324, 3, 3]", style=solid]; -"449 Constant_460" -> "451 Gather_462" [label="[]", style=dashed]; -"450 Shape_461" -> "451 Gather_462" [label="[4]", style=dashed]; -"451 Gather_462" -> "454 Unsqueeze_465" [label="[]", style=dashed]; -"452 Constant_463" -> "455 Unsqueeze_466" [label="[]", style=dashed]; -"453 Constant_464" -> "456 Unsqueeze_467" [label="[]", style=dashed]; -"454 Unsqueeze_465" -> "457 Concat_468" [label="[1]", style=dashed]; -"455 Unsqueeze_466" -> "457 Concat_468" [label="[1]", style=dashed]; -"456 Unsqueeze_467" -> "457 Concat_468" [label="[1]", style=dashed]; -"457 Concat_468" -> "458 Reshape_469" [label="[3]", style=dashed]; -"458 Reshape_469" -> "460 Concat_471" [label="[]", style=solid]; -"459 Concat_470" -> "461 Transpose_472" [label="[]", style=solid]; -"460 Concat_471" -> "462 Transpose_473" [label="[]", style=solid]; -"461 Transpose_472" -> "467 Slice_478" [label="[]", style=solid]; -"461 Transpose_472" -> "484 Slice_495" [label="[]", style=solid]; -"462 Transpose_473" -> "649 Softmax_660" [label="[]", style=solid]; -"463 Constant_474" -> "467 Slice_478" [label="[1]", style=dashed]; -"464 Constant_475" -> "467 Slice_478" [label="[1]", style=dashed]; -"465 Constant_476" -> "467 Slice_478" [label="[1]", style=dashed]; -"466 Constant_477" -> "467 Slice_478" [label="[1]", style=dashed]; -"467 Slice_478" -> "472 Slice_483" [label="[]", style=solid]; -"468 Constant_479" -> "472 Slice_483" [label="[1]", style=dashed]; -"469 Constant_480" -> "472 Slice_483" [label="[1]", style=dashed]; -"470 Constant_481" -> "472 Slice_483" [label="[1]", style=dashed]; -"471 Constant_482" -> "472 Slice_483" [label="[1]", style=dashed]; -"472 Slice_483" -> "477 Slice_488" [label="[]", style=solid]; -"473 Constant_484" -> "477 Slice_488" [label="[1]", style=dashed]; -"474 Constant_485" -> "477 Slice_488" [label="[1]", style=dashed]; -"475 Constant_486" -> "477 Slice_488" [label="[1]", style=dashed]; -"476 Constant_487" -> "477 Slice_488" [label="[1]", style=dashed]; -"477 Slice_488" -> "479 Mul_490" [label="[]", style=solid]; -"478 Constant_489" -> "479 Mul_490" [label="[]", style=solid]; -"479 Mul_490" -> "498 Mul_509" [label="[]", style=solid]; -"480 Constant_491" -> "484 Slice_495" [label="[1]", style=dashed]; -"481 Constant_492" -> "484 Slice_495" [label="[1]", style=dashed]; -"482 Constant_493" -> "484 Slice_495" [label="[1]", style=dashed]; -"483 Constant_494" -> "484 Slice_495" [label="[1]", style=dashed]; -"484 Slice_495" -> "489 Slice_500" [label="[]", style=solid]; -"485 Constant_496" -> "489 Slice_500" [label="[1]", style=dashed]; -"486 Constant_497" -> "489 Slice_500" [label="[1]", style=dashed]; -"487 Constant_498" -> "489 Slice_500" [label="[1]", style=dashed]; -"488 Constant_499" -> "489 Slice_500" [label="[1]", style=dashed]; -"489 Slice_500" -> "494 
Slice_505" [label="[]", style=solid]; -"490 Constant_501" -> "494 Slice_505" [label="[1]", style=dashed]; -"491 Constant_502" -> "494 Slice_505" [label="[1]", style=dashed]; -"492 Constant_503" -> "494 Slice_505" [label="[1]", style=dashed]; -"493 Constant_504" -> "494 Slice_505" [label="[1]", style=dashed]; -"494 Slice_505" -> "496 Mul_507" [label="[]", style=solid]; -"495 Constant_506" -> "496 Mul_507" [label="[]", style=solid]; -"496 Mul_507" -> "501 Exp_512" [label="[]", style=solid]; -"497 Constant_508" -> "498 Mul_509" [label="[1, 15130, 2]", style=solid]; -"498 Mul_509" -> "500 Add_511" [label="[]", style=solid]; -"499 Constant_510" -> "500 Add_511" [label="[1, 15130, 2]", style=solid]; -"500 Add_511" -> "508 Slice_519" [label="[]", style=solid]; -"500 Add_511" -> "543 Slice_554" [label="[]", style=solid]; -"500 Add_511" -> "578 Slice_589" [label="[]", style=solid]; -"500 Add_511" -> "613 Slice_624" [label="[]", style=solid]; -"501 Exp_512" -> "503 Mul_514" [label="[]", style=solid]; -"502 Constant_513" -> "503 Mul_514" [label="[1, 15130, 2]", style=solid]; -"503 Mul_514" -> "524 Slice_535" [label="[]", style=solid]; -"503 Mul_514" -> "559 Slice_570" [label="[]", style=solid]; -"503 Mul_514" -> "594 Slice_605" [label="[]", style=solid]; -"503 Mul_514" -> "629 Slice_640" [label="[]", style=solid]; -"504 Constant_515" -> "508 Slice_519" [label="[1]", style=dashed]; -"505 Constant_516" -> "508 Slice_519" [label="[1]", style=dashed]; -"506 Constant_517" -> "508 Slice_519" [label="[1]", style=dashed]; -"507 Constant_518" -> "508 Slice_519" [label="[1]", style=dashed]; -"508 Slice_519" -> "513 Slice_524" [label="[]", style=solid]; -"509 Constant_520" -> "513 Slice_524" [label="[1]", style=dashed]; -"510 Constant_521" -> "513 Slice_524" [label="[1]", style=dashed]; -"511 Constant_522" -> "513 Slice_524" [label="[1]", style=dashed]; -"512 Constant_523" -> "513 Slice_524" [label="[1]", style=dashed]; -"513 Slice_524" -> "518 Slice_529" [label="[]", style=solid]; -"514 Constant_525" -> "518 Slice_529" [label="[1]", style=dashed]; -"515 Constant_526" -> "518 Slice_529" [label="[1]", style=dashed]; -"516 Constant_527" -> "518 Slice_529" [label="[1]", style=dashed]; -"517 Constant_528" -> "518 Slice_529" [label="[1]", style=dashed]; -"518 Slice_529" -> "519 Squeeze_530" [label="[]", style=solid]; -"519 Squeeze_530" -> "538 Sub_549" [label="[]", style=solid]; -"520 Constant_531" -> "524 Slice_535" [label="[1]", style=dashed]; -"521 Constant_532" -> "524 Slice_535" [label="[1]", style=dashed]; -"522 Constant_533" -> "524 Slice_535" [label="[1]", style=dashed]; -"523 Constant_534" -> "524 Slice_535" [label="[1]", style=dashed]; -"524 Slice_535" -> "529 Slice_540" [label="[]", style=solid]; -"525 Constant_536" -> "529 Slice_540" [label="[1]", style=dashed]; -"526 Constant_537" -> "529 Slice_540" [label="[1]", style=dashed]; -"527 Constant_538" -> "529 Slice_540" [label="[1]", style=dashed]; -"528 Constant_539" -> "529 Slice_540" [label="[1]", style=dashed]; -"529 Slice_540" -> "534 Slice_545" [label="[]", style=solid]; -"530 Constant_541" -> "534 Slice_545" [label="[1]", style=dashed]; -"531 Constant_542" -> "534 Slice_545" [label="[1]", style=dashed]; -"532 Constant_543" -> "534 Slice_545" [label="[1]", style=dashed]; -"533 Constant_544" -> "534 Slice_545" [label="[1]", style=dashed]; -"534 Slice_545" -> "535 Squeeze_546" [label="[]", style=solid]; -"535 Squeeze_546" -> "537 Mul_548" [label="[]", style=solid]; -"536 Constant_547" -> "537 Mul_548" [label="[]", style=solid]; -"537 Mul_548" -> "538 
Sub_549" [label="[]", style=solid]; -"538 Sub_549" -> "644 Unsqueeze_655" [label="[]", style=solid]; -"539 Constant_550" -> "543 Slice_554" [label="[1]", style=dashed]; -"540 Constant_551" -> "543 Slice_554" [label="[1]", style=dashed]; -"541 Constant_552" -> "543 Slice_554" [label="[1]", style=dashed]; -"542 Constant_553" -> "543 Slice_554" [label="[1]", style=dashed]; -"543 Slice_554" -> "548 Slice_559" [label="[]", style=solid]; -"544 Constant_555" -> "548 Slice_559" [label="[1]", style=dashed]; -"545 Constant_556" -> "548 Slice_559" [label="[1]", style=dashed]; -"546 Constant_557" -> "548 Slice_559" [label="[1]", style=dashed]; -"547 Constant_558" -> "548 Slice_559" [label="[1]", style=dashed]; -"548 Slice_559" -> "553 Slice_564" [label="[]", style=solid]; -"549 Constant_560" -> "553 Slice_564" [label="[1]", style=dashed]; -"550 Constant_561" -> "553 Slice_564" [label="[1]", style=dashed]; -"551 Constant_562" -> "553 Slice_564" [label="[1]", style=dashed]; -"552 Constant_563" -> "553 Slice_564" [label="[1]", style=dashed]; -"553 Slice_564" -> "554 Squeeze_565" [label="[]", style=solid]; -"554 Squeeze_565" -> "573 Sub_584" [label="[]", style=solid]; -"555 Constant_566" -> "559 Slice_570" [label="[1]", style=dashed]; -"556 Constant_567" -> "559 Slice_570" [label="[1]", style=dashed]; -"557 Constant_568" -> "559 Slice_570" [label="[1]", style=dashed]; -"558 Constant_569" -> "559 Slice_570" [label="[1]", style=dashed]; -"559 Slice_570" -> "564 Slice_575" [label="[]", style=solid]; -"560 Constant_571" -> "564 Slice_575" [label="[1]", style=dashed]; -"561 Constant_572" -> "564 Slice_575" [label="[1]", style=dashed]; -"562 Constant_573" -> "564 Slice_575" [label="[1]", style=dashed]; -"563 Constant_574" -> "564 Slice_575" [label="[1]", style=dashed]; -"564 Slice_575" -> "569 Slice_580" [label="[]", style=solid]; -"565 Constant_576" -> "569 Slice_580" [label="[1]", style=dashed]; -"566 Constant_577" -> "569 Slice_580" [label="[1]", style=dashed]; -"567 Constant_578" -> "569 Slice_580" [label="[1]", style=dashed]; -"568 Constant_579" -> "569 Slice_580" [label="[1]", style=dashed]; -"569 Slice_580" -> "570 Squeeze_581" [label="[]", style=solid]; -"570 Squeeze_581" -> "572 Mul_583" [label="[]", style=solid]; -"571 Constant_582" -> "572 Mul_583" [label="[]", style=solid]; -"572 Mul_583" -> "573 Sub_584" [label="[]", style=solid]; -"573 Sub_584" -> "645 Unsqueeze_656" [label="[]", style=solid]; -"574 Constant_585" -> "578 Slice_589" [label="[1]", style=dashed]; -"575 Constant_586" -> "578 Slice_589" [label="[1]", style=dashed]; -"576 Constant_587" -> "578 Slice_589" [label="[1]", style=dashed]; -"577 Constant_588" -> "578 Slice_589" [label="[1]", style=dashed]; -"578 Slice_589" -> "583 Slice_594" [label="[]", style=solid]; -"579 Constant_590" -> "583 Slice_594" [label="[1]", style=dashed]; -"580 Constant_591" -> "583 Slice_594" [label="[1]", style=dashed]; -"581 Constant_592" -> "583 Slice_594" [label="[1]", style=dashed]; -"582 Constant_593" -> "583 Slice_594" [label="[1]", style=dashed]; -"583 Slice_594" -> "588 Slice_599" [label="[]", style=solid]; -"584 Constant_595" -> "588 Slice_599" [label="[1]", style=dashed]; -"585 Constant_596" -> "588 Slice_599" [label="[1]", style=dashed]; -"586 Constant_597" -> "588 Slice_599" [label="[1]", style=dashed]; -"587 Constant_598" -> "588 Slice_599" [label="[1]", style=dashed]; -"588 Slice_599" -> "589 Squeeze_600" [label="[]", style=solid]; -"589 Squeeze_600" -> "608 Add_619" [label="[]", style=solid]; -"590 Constant_601" -> "594 Slice_605" [label="[1]", 
style=dashed]; -"591 Constant_602" -> "594 Slice_605" [label="[1]", style=dashed]; -"592 Constant_603" -> "594 Slice_605" [label="[1]", style=dashed]; -"593 Constant_604" -> "594 Slice_605" [label="[1]", style=dashed]; -"594 Slice_605" -> "599 Slice_610" [label="[]", style=solid]; -"595 Constant_606" -> "599 Slice_610" [label="[1]", style=dashed]; -"596 Constant_607" -> "599 Slice_610" [label="[1]", style=dashed]; -"597 Constant_608" -> "599 Slice_610" [label="[1]", style=dashed]; -"598 Constant_609" -> "599 Slice_610" [label="[1]", style=dashed]; -"599 Slice_610" -> "604 Slice_615" [label="[]", style=solid]; -"600 Constant_611" -> "604 Slice_615" [label="[1]", style=dashed]; -"601 Constant_612" -> "604 Slice_615" [label="[1]", style=dashed]; -"602 Constant_613" -> "604 Slice_615" [label="[1]", style=dashed]; -"603 Constant_614" -> "604 Slice_615" [label="[1]", style=dashed]; -"604 Slice_615" -> "605 Squeeze_616" [label="[]", style=solid]; -"605 Squeeze_616" -> "607 Mul_618" [label="[]", style=solid]; -"606 Constant_617" -> "607 Mul_618" [label="[]", style=solid]; -"607 Mul_618" -> "608 Add_619" [label="[]", style=solid]; -"608 Add_619" -> "646 Unsqueeze_657" [label="[]", style=solid]; -"609 Constant_620" -> "613 Slice_624" [label="[1]", style=dashed]; -"610 Constant_621" -> "613 Slice_624" [label="[1]", style=dashed]; -"611 Constant_622" -> "613 Slice_624" [label="[1]", style=dashed]; -"612 Constant_623" -> "613 Slice_624" [label="[1]", style=dashed]; -"613 Slice_624" -> "618 Slice_629" [label="[]", style=solid]; -"614 Constant_625" -> "618 Slice_629" [label="[1]", style=dashed]; -"615 Constant_626" -> "618 Slice_629" [label="[1]", style=dashed]; -"616 Constant_627" -> "618 Slice_629" [label="[1]", style=dashed]; -"617 Constant_628" -> "618 Slice_629" [label="[1]", style=dashed]; -"618 Slice_629" -> "623 Slice_634" [label="[]", style=solid]; -"619 Constant_630" -> "623 Slice_634" [label="[1]", style=dashed]; -"620 Constant_631" -> "623 Slice_634" [label="[1]", style=dashed]; -"621 Constant_632" -> "623 Slice_634" [label="[1]", style=dashed]; -"622 Constant_633" -> "623 Slice_634" [label="[1]", style=dashed]; -"623 Slice_634" -> "624 Squeeze_635" [label="[]", style=solid]; -"624 Squeeze_635" -> "643 Add_654" [label="[]", style=solid]; -"625 Constant_636" -> "629 Slice_640" [label="[1]", style=dashed]; -"626 Constant_637" -> "629 Slice_640" [label="[1]", style=dashed]; -"627 Constant_638" -> "629 Slice_640" [label="[1]", style=dashed]; -"628 Constant_639" -> "629 Slice_640" [label="[1]", style=dashed]; -"629 Slice_640" -> "634 Slice_645" [label="[]", style=solid]; -"630 Constant_641" -> "634 Slice_645" [label="[1]", style=dashed]; -"631 Constant_642" -> "634 Slice_645" [label="[1]", style=dashed]; -"632 Constant_643" -> "634 Slice_645" [label="[1]", style=dashed]; -"633 Constant_644" -> "634 Slice_645" [label="[1]", style=dashed]; -"634 Slice_645" -> "639 Slice_650" [label="[]", style=solid]; -"635 Constant_646" -> "639 Slice_650" [label="[1]", style=dashed]; -"636 Constant_647" -> "639 Slice_650" [label="[1]", style=dashed]; -"637 Constant_648" -> "639 Slice_650" [label="[1]", style=dashed]; -"638 Constant_649" -> "639 Slice_650" [label="[1]", style=dashed]; -"639 Slice_650" -> "640 Squeeze_651" [label="[]", style=solid]; -"640 Squeeze_651" -> "642 Mul_653" [label="[]", style=solid]; -"641 Constant_652" -> "642 Mul_653" [label="[]", style=solid]; -"642 Mul_653" -> "643 Add_654" [label="[]", style=solid]; -"643 Add_654" -> "647 Unsqueeze_658" [label="[]", style=solid]; -"644 Unsqueeze_655" 
-> "648 Concat_659" [label="[]", style=solid]; -"645 Unsqueeze_656" -> "648 Concat_659" [label="[]", style=solid]; -"646 Unsqueeze_657" -> "648 Concat_659" [label="[]", style=solid]; -"647 Unsqueeze_658" -> "648 Concat_659" [label="[]", style=solid]; -"648 Concat_659" -> "672 NonMaxSuppression_683" [label="[]", style=solid]; -"648 Concat_659" -> "707 Squeeze_719" [label="[]", style=solid]; -"649 Softmax_660" -> "650 Transpose_661" [label="[]", style=solid]; -"650 Transpose_661" -> "655 Slice_666" [label="[]", style=solid]; -"651 Constant_662" -> "655 Slice_666" [label="[1]", style=dashed]; -"652 Constant_663" -> "655 Slice_666" [label="[1]", style=dashed]; -"653 Constant_664" -> "655 Slice_666" [label="[1]", style=dashed]; -"654 Constant_665" -> "655 Slice_666" [label="[1]", style=dashed]; -"655 Slice_666" -> "660 Slice_671" [label="[]", style=solid]; -"656 Constant_667" -> "660 Slice_671" [label="[1]", style=dashed]; -"657 Constant_668" -> "660 Slice_671" [label="[1]", style=dashed]; -"658 Constant_669" -> "660 Slice_671" [label="[1]", style=dashed]; -"659 Constant_670" -> "660 Slice_671" [label="[1]", style=dashed]; -"660 Slice_671" -> "665 Slice_676" [label="[]", style=solid]; -"661 Constant_672" -> "665 Slice_676" [label="[1]", style=dashed]; -"662 Constant_673" -> "665 Slice_676" [label="[1]", style=dashed]; -"663 Constant_674" -> "665 Slice_676" [label="[1]", style=dashed]; -"664 Constant_675" -> "665 Slice_676" [label="[1]", style=dashed]; -"665 Slice_676" -> "672 NonMaxSuppression_683" [label="[]", style=solid]; -"665 Slice_676" -> "688 Reshape_699" [label="[]", style=solid]; -"665 Slice_676" -> "689 Shape_700" [label="[]", style=solid]; -"666 Constant_677" -> "667 ConstantOfShape_678" [label="[1]", style=dashed]; -"667 ConstantOfShape_678" -> "672 NonMaxSuppression_683" [label="[1]", style=dashed]; -"668 Constant_679" -> "669 ConstantOfShape_680" [label="[1]", style=dashed]; -"669 ConstantOfShape_680" -> "672 NonMaxSuppression_683" [label="[1]", style=solid]; -"670 Constant_681" -> "671 ConstantOfShape_682" [label="[1]", style=dashed]; -"671 ConstantOfShape_682" -> "672 NonMaxSuppression_683" [label="[1]", style=solid]; -"672 NonMaxSuppression_683" -> "677 Slice_688" [label="[-1, 3]", style=dashed]; -"672 NonMaxSuppression_683" -> "684 Slice_695" [label="[-1, 3]", style=dashed]; -"673 Constant_684" -> "677 Slice_688" [label="[1]", style=dashed]; -"674 Constant_685" -> "677 Slice_688" [label="[1]", style=dashed]; -"675 Constant_686" -> "677 Slice_688" [label="[1]", style=dashed]; -"676 Constant_687" -> "677 Slice_688" [label="[1]", style=dashed]; -"677 Slice_688" -> "679 Gather_690" [label="[-1, 3]", style=dashed]; -"678 Constant_689" -> "679 Gather_690" [label="[]", style=dashed]; -"679 Gather_690" -> "692 Mul_703" [label="[-1]", style=dashed]; -"679 Gather_690" -> "717 Gather_729" [label="[-1]", style=dashed]; -"680 Constant_691" -> "684 Slice_695" [label="[1]", style=dashed]; -"681 Constant_692" -> "684 Slice_695" [label="[1]", style=dashed]; -"682 Constant_693" -> "684 Slice_695" [label="[1]", style=dashed]; -"683 Constant_694" -> "684 Slice_695" [label="[1]", style=dashed]; -"684 Slice_695" -> "686 Gather_697" [label="[-1, 3]", style=dashed]; -"685 Constant_696" -> "686 Gather_697" [label="[]", style=dashed]; -"686 Gather_697" -> "693 Add_704" [label="[-1]", style=dashed]; -"686 Gather_697" -> "708 Gather_720" [label="[-1]", style=dashed]; -"687 Constant_698" -> "688 Reshape_699" [label="[1]", style=dashed]; -"688 Reshape_699" -> "695 Gather_706" [label="[-1]", style=solid]; 
-"689 Shape_700" -> "691 Gather_702" [label="[-1]", style=dashed]; -"690 Constant_701" -> "691 Gather_702" [label="[]", style=dashed]; -"691 Gather_702" -> "692 Mul_703" [label="[]", style=dashed]; -"692 Mul_703" -> "693 Add_704" [label="[-1]", style=dashed]; -"693 Add_704" -> "694 Cast_705" [label="[-1]", style=dashed]; -"694 Cast_705" -> "695 Gather_706" [label="[-1]", style=dashed]; -"695 Gather_706" -> "696 Shape_707" [label="[-1]", style=solid]; -"695 Gather_706" -> "706 TopK_717" [label="[-1]", style=solid]; -"695 Gather_706" -> "721 Gather_733" [label="[-1]", style=solid]; -"696 Shape_707" -> "698 Gather_709" [label="[1]", style=dashed]; -"697 Constant_708" -> "698 Gather_709" [label="[]", style=dashed]; -"698 Gather_709" -> "699 Unsqueeze_710" [label="[]", style=dashed]; -"699 Unsqueeze_710" -> "701 Concat_712" [label="[1]", style=dashed]; -"700 Constant_711" -> "701 Concat_712" [label="[1]", style=dashed]; -"701 Concat_712" -> "702 Cast_713" [label="[2]", style=dashed]; -"702 Cast_713" -> "703 ReduceMin_714" [label="[2]", style=dashed]; -"703 ReduceMin_714" -> "704 Cast_715" [label="[]", style=dashed]; -"704 Cast_715" -> "705 Unsqueeze_716" [label="[]", style=dashed]; -"705 Unsqueeze_716" -> "706 TopK_717" [label="[1]", style=dashed]; -"706 TopK_717" -> "708 Gather_720" [label="[-1]", style=dashed]; -"706 TopK_717" -> "717 Gather_729" [label="[-1]", style=dashed]; -"706 TopK_717" -> "721 Gather_733" [label="[-1]", style=dashed]; -"707 Squeeze_719" -> "713 Slice_725" [label="[]", style=solid]; -"708 Gather_720" -> "714 Cast_726" [label="[-1]", style=dashed]; -"709 Constant_721" -> "713 Slice_725" [label="[1]", style=dashed]; -"710 Constant_722" -> "713 Slice_725" [label="[1]", style=dashed]; -"711 Constant_723" -> "713 Slice_725" [label="[1]", style=dashed]; -"712 Constant_724" -> "713 Slice_725" [label="[1]", style=dashed]; -"713 Slice_725" -> "715 Gather_727" [label="[]", style=solid]; -"714 Cast_726" -> "715 Gather_727" [label="[-1]", style=dashed]; -"715 Gather_727" -> "716 Unsqueeze_bboxes" [label="[]", style=solid]; -"716 Unsqueeze_bboxes" -> "724 nncf_model_output_0" [label="[1, -1, 4]", style=solid]; -"717 Gather_729" -> "718 Unsqueeze_730" [label="[-1]", style=dashed]; -"718 Unsqueeze_730" -> "720 Add_labels" [label="[1, -1]", style=dashed]; -"719 Constant_731" -> "720 Add_labels" [label="[]", style=dashed]; -"720 Add_labels" -> "725 nncf_model_output_1" [label="[1, -1]", style=dashed]; -"721 Gather_733" -> "722 Unsqueeze_scores" [label="[-1]", style=solid]; -"722 Unsqueeze_scores" -> "726 nncf_model_output_2" [label="[1, -1]", style=solid]; -"723 nncf_model_input_0" -> "0 QuantizeLinear_image_1" [label="[1, 3, 1200, 1200]", style=solid]; +"306 Gather_341" -> "310 Unsqueeze_344" [label="[]", style=dashed]; +"307 Constant_342" -> "312 Unsqueeze_345" [label="[]", style=dashed]; +"308 Constant_343" -> "314 Unsqueeze_346" [label="[]", style=dashed]; +"309 Constant_nncf_125" -> "310 Unsqueeze_344" [label="[1]", style=dashed]; +"310 Unsqueeze_344" -> "315 Concat_347" [label="[1]", style=dashed]; +"311 Constant_nncf_127" -> "312 Unsqueeze_345" [label="[1]", style=dashed]; +"312 Unsqueeze_345" -> "315 Concat_347" [label="[1]", style=dashed]; +"313 Constant_nncf_129" -> "314 Unsqueeze_346" [label="[1]", style=dashed]; +"314 Unsqueeze_346" -> "315 Concat_347" [label="[1]", style=dashed]; +"315 Concat_347" -> "316 Reshape_348" [label="[3]", style=dashed]; +"316 Reshape_348" -> "495 Concat_470" [label="[]", style=solid]; +"317 QuantizeLinear_backbone.conf.0.weight_1" -> "318 
DequantizeLinear_backbone.conf.0.weight_1" [label="[324, 256, 3, 3]", style=dashed]; +"318 DequantizeLinear_backbone.conf.0.weight_1" -> "319 Conv_349" [label="[324, 256, 3, 3]", style=solid]; +"319 Conv_349" -> "332 Reshape_359" [label="[1, 324, 50, 50]", style=solid]; +"320 Constant_350" -> "322 Gather_352" [label="[]", style=dashed]; +"321 Shape_351" -> "322 Gather_352" [label="[4]", style=dashed]; +"322 Gather_352" -> "326 Unsqueeze_355" [label="[]", style=dashed]; +"323 Constant_353" -> "328 Unsqueeze_356" [label="[]", style=dashed]; +"324 Constant_354" -> "330 Unsqueeze_357" [label="[]", style=dashed]; +"325 Constant_nncf_139" -> "326 Unsqueeze_355" [label="[1]", style=dashed]; +"326 Unsqueeze_355" -> "331 Concat_358" [label="[1]", style=dashed]; +"327 Constant_nncf_141" -> "328 Unsqueeze_356" [label="[1]", style=dashed]; +"328 Unsqueeze_356" -> "331 Concat_358" [label="[1]", style=dashed]; +"329 Constant_nncf_143" -> "330 Unsqueeze_357" [label="[1]", style=dashed]; +"330 Unsqueeze_357" -> "331 Concat_358" [label="[1]", style=dashed]; +"331 Concat_358" -> "332 Reshape_359" [label="[3]", style=dashed]; +"332 Reshape_359" -> "496 Concat_471" [label="[]", style=solid]; +"333 QuantizeLinear_backbone.loc.1.weight_1" -> "334 DequantizeLinear_backbone.loc.1.weight_1" [label="[24, 512, 3, 3]", style=dashed]; +"334 DequantizeLinear_backbone.loc.1.weight_1" -> "335 Conv_360" [label="[24, 512, 3, 3]", style=solid]; +"335 Conv_360" -> "348 Reshape_370" [label="[1, 24, 25, 25]", style=solid]; +"336 Constant_361" -> "338 Gather_363" [label="[]", style=dashed]; +"337 Shape_362" -> "338 Gather_363" [label="[4]", style=dashed]; +"338 Gather_363" -> "342 Unsqueeze_366" [label="[]", style=dashed]; +"339 Constant_364" -> "344 Unsqueeze_367" [label="[]", style=dashed]; +"340 Constant_365" -> "346 Unsqueeze_368" [label="[]", style=dashed]; +"341 Constant_nncf_153" -> "342 Unsqueeze_366" [label="[1]", style=dashed]; +"342 Unsqueeze_366" -> "347 Concat_369" [label="[1]", style=dashed]; +"343 Constant_nncf_155" -> "344 Unsqueeze_367" [label="[1]", style=dashed]; +"344 Unsqueeze_367" -> "347 Concat_369" [label="[1]", style=dashed]; +"345 Constant_nncf_157" -> "346 Unsqueeze_368" [label="[1]", style=dashed]; +"346 Unsqueeze_368" -> "347 Concat_369" [label="[1]", style=dashed]; +"347 Concat_369" -> "348 Reshape_370" [label="[3]", style=dashed]; +"348 Reshape_370" -> "495 Concat_470" [label="[]", style=solid]; +"349 QuantizeLinear_backbone.conf.1.weight_1" -> "350 DequantizeLinear_backbone.conf.1.weight_1" [label="[486, 512, 3, 3]", style=dashed]; +"350 DequantizeLinear_backbone.conf.1.weight_1" -> "351 Conv_371" [label="[486, 512, 3, 3]", style=solid]; +"351 Conv_371" -> "364 Reshape_381" [label="[1, 486, 25, 25]", style=solid]; +"352 Constant_372" -> "354 Gather_374" [label="[]", style=dashed]; +"353 Shape_373" -> "354 Gather_374" [label="[4]", style=dashed]; +"354 Gather_374" -> "358 Unsqueeze_377" [label="[]", style=dashed]; +"355 Constant_375" -> "360 Unsqueeze_378" [label="[]", style=dashed]; +"356 Constant_376" -> "362 Unsqueeze_379" [label="[]", style=dashed]; +"357 Constant_nncf_167" -> "358 Unsqueeze_377" [label="[1]", style=dashed]; +"358 Unsqueeze_377" -> "363 Concat_380" [label="[1]", style=dashed]; +"359 Constant_nncf_169" -> "360 Unsqueeze_378" [label="[1]", style=dashed]; +"360 Unsqueeze_378" -> "363 Concat_380" [label="[1]", style=dashed]; +"361 Constant_nncf_171" -> "362 Unsqueeze_379" [label="[1]", style=dashed]; +"362 Unsqueeze_379" -> "363 Concat_380" [label="[1]", style=dashed]; +"363 
Concat_380" -> "364 Reshape_381" [label="[3]", style=dashed]; +"364 Reshape_381" -> "496 Concat_471" [label="[]", style=solid]; +"365 QuantizeLinear_backbone.loc.2.weight_1" -> "366 DequantizeLinear_backbone.loc.2.weight_1" [label="[24, 512, 3, 3]", style=dashed]; +"366 DequantizeLinear_backbone.loc.2.weight_1" -> "367 Conv_382" [label="[24, 512, 3, 3]", style=solid]; +"367 Conv_382" -> "380 Reshape_392" [label="[1, 24, 13, 13]", style=solid]; +"368 Constant_383" -> "370 Gather_385" [label="[]", style=dashed]; +"369 Shape_384" -> "370 Gather_385" [label="[4]", style=dashed]; +"370 Gather_385" -> "374 Unsqueeze_388" [label="[]", style=dashed]; +"371 Constant_386" -> "376 Unsqueeze_389" [label="[]", style=dashed]; +"372 Constant_387" -> "378 Unsqueeze_390" [label="[]", style=dashed]; +"373 Constant_nncf_181" -> "374 Unsqueeze_388" [label="[1]", style=dashed]; +"374 Unsqueeze_388" -> "379 Concat_391" [label="[1]", style=dashed]; +"375 Constant_nncf_183" -> "376 Unsqueeze_389" [label="[1]", style=dashed]; +"376 Unsqueeze_389" -> "379 Concat_391" [label="[1]", style=dashed]; +"377 Constant_nncf_185" -> "378 Unsqueeze_390" [label="[1]", style=dashed]; +"378 Unsqueeze_390" -> "379 Concat_391" [label="[1]", style=dashed]; +"379 Concat_391" -> "380 Reshape_392" [label="[3]", style=dashed]; +"380 Reshape_392" -> "495 Concat_470" [label="[]", style=solid]; +"381 QuantizeLinear_backbone.conf.2.weight_1" -> "382 DequantizeLinear_backbone.conf.2.weight_1" [label="[486, 512, 3, 3]", style=dashed]; +"382 DequantizeLinear_backbone.conf.2.weight_1" -> "383 Conv_393" [label="[486, 512, 3, 3]", style=solid]; +"383 Conv_393" -> "396 Reshape_403" [label="[1, 486, 13, 13]", style=solid]; +"384 Constant_394" -> "386 Gather_396" [label="[]", style=dashed]; +"385 Shape_395" -> "386 Gather_396" [label="[4]", style=dashed]; +"386 Gather_396" -> "390 Unsqueeze_399" [label="[]", style=dashed]; +"387 Constant_397" -> "392 Unsqueeze_400" [label="[]", style=dashed]; +"388 Constant_398" -> "394 Unsqueeze_401" [label="[]", style=dashed]; +"389 Constant_nncf_195" -> "390 Unsqueeze_399" [label="[1]", style=dashed]; +"390 Unsqueeze_399" -> "395 Concat_402" [label="[1]", style=dashed]; +"391 Constant_nncf_197" -> "392 Unsqueeze_400" [label="[1]", style=dashed]; +"392 Unsqueeze_400" -> "395 Concat_402" [label="[1]", style=dashed]; +"393 Constant_nncf_199" -> "394 Unsqueeze_401" [label="[1]", style=dashed]; +"394 Unsqueeze_401" -> "395 Concat_402" [label="[1]", style=dashed]; +"395 Concat_402" -> "396 Reshape_403" [label="[3]", style=dashed]; +"396 Reshape_403" -> "496 Concat_471" [label="[]", style=solid]; +"397 QuantizeLinear_backbone.loc.3.weight_1" -> "398 DequantizeLinear_backbone.loc.3.weight_1" [label="[24, 256, 3, 3]", style=dashed]; +"398 DequantizeLinear_backbone.loc.3.weight_1" -> "399 Conv_404" [label="[24, 256, 3, 3]", style=solid]; +"399 Conv_404" -> "412 Reshape_414" [label="[1, 24, 7, 7]", style=solid]; +"400 Constant_405" -> "402 Gather_407" [label="[]", style=dashed]; +"401 Shape_406" -> "402 Gather_407" [label="[4]", style=dashed]; +"402 Gather_407" -> "406 Unsqueeze_410" [label="[]", style=dashed]; +"403 Constant_408" -> "408 Unsqueeze_411" [label="[]", style=dashed]; +"404 Constant_409" -> "410 Unsqueeze_412" [label="[]", style=dashed]; +"405 Constant_nncf_209" -> "406 Unsqueeze_410" [label="[1]", style=dashed]; +"406 Unsqueeze_410" -> "411 Concat_413" [label="[1]", style=dashed]; +"407 Constant_nncf_211" -> "408 Unsqueeze_411" [label="[1]", style=dashed]; +"408 Unsqueeze_411" -> "411 Concat_413" 
[label="[1]", style=dashed]; +"409 Constant_nncf_213" -> "410 Unsqueeze_412" [label="[1]", style=dashed]; +"410 Unsqueeze_412" -> "411 Concat_413" [label="[1]", style=dashed]; +"411 Concat_413" -> "412 Reshape_414" [label="[3]", style=dashed]; +"412 Reshape_414" -> "495 Concat_470" [label="[]", style=solid]; +"413 QuantizeLinear_backbone.conf.3.weight_1" -> "414 DequantizeLinear_backbone.conf.3.weight_1" [label="[486, 256, 3, 3]", style=dashed]; +"414 DequantizeLinear_backbone.conf.3.weight_1" -> "415 Conv_415" [label="[486, 256, 3, 3]", style=solid]; +"415 Conv_415" -> "428 Reshape_425" [label="[1, 486, 7, 7]", style=solid]; +"416 Constant_416" -> "418 Gather_418" [label="[]", style=dashed]; +"417 Shape_417" -> "418 Gather_418" [label="[4]", style=dashed]; +"418 Gather_418" -> "422 Unsqueeze_421" [label="[]", style=dashed]; +"419 Constant_419" -> "424 Unsqueeze_422" [label="[]", style=dashed]; +"420 Constant_420" -> "426 Unsqueeze_423" [label="[]", style=dashed]; +"421 Constant_nncf_223" -> "422 Unsqueeze_421" [label="[1]", style=dashed]; +"422 Unsqueeze_421" -> "427 Concat_424" [label="[1]", style=dashed]; +"423 Constant_nncf_225" -> "424 Unsqueeze_422" [label="[1]", style=dashed]; +"424 Unsqueeze_422" -> "427 Concat_424" [label="[1]", style=dashed]; +"425 Constant_nncf_227" -> "426 Unsqueeze_423" [label="[1]", style=dashed]; +"426 Unsqueeze_423" -> "427 Concat_424" [label="[1]", style=dashed]; +"427 Concat_424" -> "428 Reshape_425" [label="[3]", style=dashed]; +"428 Reshape_425" -> "496 Concat_471" [label="[]", style=solid]; +"429 QuantizeLinear_backbone.loc.4.weight_1" -> "430 DequantizeLinear_backbone.loc.4.weight_1" [label="[16, 256, 3, 3]", style=dashed]; +"430 DequantizeLinear_backbone.loc.4.weight_1" -> "431 Conv_426" [label="[16, 256, 3, 3]", style=solid]; +"431 Conv_426" -> "444 Reshape_436" [label="[1, 16, 3, 3]", style=solid]; +"432 Constant_427" -> "434 Gather_429" [label="[]", style=dashed]; +"433 Shape_428" -> "434 Gather_429" [label="[4]", style=dashed]; +"434 Gather_429" -> "438 Unsqueeze_432" [label="[]", style=dashed]; +"435 Constant_430" -> "440 Unsqueeze_433" [label="[]", style=dashed]; +"436 Constant_431" -> "442 Unsqueeze_434" [label="[]", style=dashed]; +"437 Constant_nncf_237" -> "438 Unsqueeze_432" [label="[1]", style=dashed]; +"438 Unsqueeze_432" -> "443 Concat_435" [label="[1]", style=dashed]; +"439 Constant_nncf_239" -> "440 Unsqueeze_433" [label="[1]", style=dashed]; +"440 Unsqueeze_433" -> "443 Concat_435" [label="[1]", style=dashed]; +"441 Constant_nncf_241" -> "442 Unsqueeze_434" [label="[1]", style=dashed]; +"442 Unsqueeze_434" -> "443 Concat_435" [label="[1]", style=dashed]; +"443 Concat_435" -> "444 Reshape_436" [label="[3]", style=dashed]; +"444 Reshape_436" -> "495 Concat_470" [label="[]", style=solid]; +"445 QuantizeLinear_backbone.conf.4.weight_1" -> "446 DequantizeLinear_backbone.conf.4.weight_1" [label="[324, 256, 3, 3]", style=dashed]; +"446 DequantizeLinear_backbone.conf.4.weight_1" -> "447 Conv_437" [label="[324, 256, 3, 3]", style=solid]; +"447 Conv_437" -> "460 Reshape_447" [label="[1, 324, 3, 3]", style=solid]; +"448 Constant_438" -> "450 Gather_440" [label="[]", style=dashed]; +"449 Shape_439" -> "450 Gather_440" [label="[4]", style=dashed]; +"450 Gather_440" -> "454 Unsqueeze_443" [label="[]", style=dashed]; +"451 Constant_441" -> "456 Unsqueeze_444" [label="[]", style=dashed]; +"452 Constant_442" -> "458 Unsqueeze_445" [label="[]", style=dashed]; +"453 Constant_nncf_251" -> "454 Unsqueeze_443" [label="[1]", style=dashed]; +"454 
Unsqueeze_443" -> "459 Concat_446" [label="[1]", style=dashed]; +"455 Constant_nncf_253" -> "456 Unsqueeze_444" [label="[1]", style=dashed]; +"456 Unsqueeze_444" -> "459 Concat_446" [label="[1]", style=dashed]; +"457 Constant_nncf_255" -> "458 Unsqueeze_445" [label="[1]", style=dashed]; +"458 Unsqueeze_445" -> "459 Concat_446" [label="[1]", style=dashed]; +"459 Concat_446" -> "460 Reshape_447" [label="[3]", style=dashed]; +"460 Reshape_447" -> "496 Concat_471" [label="[]", style=solid]; +"461 QuantizeLinear_Relu_337_1" -> "462 DequantizeLinear_Relu_337_1" [label="[1, 256, 7, 7]", style=dashed]; +"462 DequantizeLinear_Relu_337_1" -> "465 Conv_448" [label="[1, 256, 7, 7]", style=solid]; +"462 DequantizeLinear_Relu_337_1" -> "467 Shape_450" [label="[1, 256, 7, 7]", style=solid]; +"462 DequantizeLinear_Relu_337_1" -> "481 Conv_459" [label="[1, 256, 7, 7]", style=solid]; +"462 DequantizeLinear_Relu_337_1" -> "483 Shape_461" [label="[1, 256, 7, 7]", style=solid]; +"463 QuantizeLinear_backbone.loc.5.weight_1" -> "464 DequantizeLinear_backbone.loc.5.weight_1" [label="[16, 256, 3, 3]", style=dashed]; +"464 DequantizeLinear_backbone.loc.5.weight_1" -> "465 Conv_448" [label="[16, 256, 3, 3]", style=solid]; +"465 Conv_448" -> "478 Reshape_458" [label="[1, 16, 3, 3]", style=solid]; +"466 Constant_449" -> "468 Gather_451" [label="[]", style=dashed]; +"467 Shape_450" -> "468 Gather_451" [label="[4]", style=dashed]; +"468 Gather_451" -> "472 Unsqueeze_454" [label="[]", style=dashed]; +"469 Constant_452" -> "474 Unsqueeze_455" [label="[]", style=dashed]; +"470 Constant_453" -> "476 Unsqueeze_456" [label="[]", style=dashed]; +"471 Constant_nncf_265" -> "472 Unsqueeze_454" [label="[1]", style=dashed]; +"472 Unsqueeze_454" -> "477 Concat_457" [label="[1]", style=dashed]; +"473 Constant_nncf_267" -> "474 Unsqueeze_455" [label="[1]", style=dashed]; +"474 Unsqueeze_455" -> "477 Concat_457" [label="[1]", style=dashed]; +"475 Constant_nncf_269" -> "476 Unsqueeze_456" [label="[1]", style=dashed]; +"476 Unsqueeze_456" -> "477 Concat_457" [label="[1]", style=dashed]; +"477 Concat_457" -> "478 Reshape_458" [label="[3]", style=dashed]; +"478 Reshape_458" -> "495 Concat_470" [label="[]", style=solid]; +"479 QuantizeLinear_backbone.conf.5.weight_1" -> "480 DequantizeLinear_backbone.conf.5.weight_1" [label="[324, 256, 3, 3]", style=dashed]; +"480 DequantizeLinear_backbone.conf.5.weight_1" -> "481 Conv_459" [label="[324, 256, 3, 3]", style=solid]; +"481 Conv_459" -> "494 Reshape_469" [label="[1, 324, 3, 3]", style=solid]; +"482 Constant_460" -> "484 Gather_462" [label="[]", style=dashed]; +"483 Shape_461" -> "484 Gather_462" [label="[4]", style=dashed]; +"484 Gather_462" -> "488 Unsqueeze_465" [label="[]", style=dashed]; +"485 Constant_463" -> "490 Unsqueeze_466" [label="[]", style=dashed]; +"486 Constant_464" -> "492 Unsqueeze_467" [label="[]", style=dashed]; +"487 Constant_nncf_279" -> "488 Unsqueeze_465" [label="[1]", style=dashed]; +"488 Unsqueeze_465" -> "493 Concat_468" [label="[1]", style=dashed]; +"489 Constant_nncf_281" -> "490 Unsqueeze_466" [label="[1]", style=dashed]; +"490 Unsqueeze_466" -> "493 Concat_468" [label="[1]", style=dashed]; +"491 Constant_nncf_283" -> "492 Unsqueeze_467" [label="[1]", style=dashed]; +"492 Unsqueeze_467" -> "493 Concat_468" [label="[1]", style=dashed]; +"493 Concat_468" -> "494 Reshape_469" [label="[3]", style=dashed]; +"494 Reshape_469" -> "496 Concat_471" [label="[]", style=solid]; +"495 Concat_470" -> "497 Transpose_472" [label="[]", style=solid]; +"496 Concat_471" -> "498 
Transpose_473" [label="[]", style=solid]; +"497 Transpose_472" -> "503 Slice_478" [label="[]", style=solid]; +"497 Transpose_472" -> "520 Slice_495" [label="[]", style=solid]; +"498 Transpose_473" -> "697 Shape_nncf_489" [label="[]", style=solid]; +"498 Transpose_473" -> "698 Flatten_nncf_490" [label="[]", style=solid]; +"499 Constant_474" -> "503 Slice_478" [label="[1]", style=dashed]; +"500 Constant_475" -> "503 Slice_478" [label="[1]", style=dashed]; +"501 Constant_476" -> "503 Slice_478" [label="[1]", style=dashed]; +"502 Constant_477" -> "503 Slice_478" [label="[1]", style=dashed]; +"503 Slice_478" -> "508 Slice_483" [label="[]", style=solid]; +"504 Constant_479" -> "508 Slice_483" [label="[1]", style=dashed]; +"505 Constant_480" -> "508 Slice_483" [label="[1]", style=dashed]; +"506 Constant_481" -> "508 Slice_483" [label="[1]", style=dashed]; +"507 Constant_482" -> "508 Slice_483" [label="[1]", style=dashed]; +"508 Slice_483" -> "513 Slice_488" [label="[]", style=solid]; +"509 Constant_484" -> "513 Slice_488" [label="[1]", style=dashed]; +"510 Constant_485" -> "513 Slice_488" [label="[1]", style=dashed]; +"511 Constant_486" -> "513 Slice_488" [label="[1]", style=dashed]; +"512 Constant_487" -> "513 Slice_488" [label="[1]", style=dashed]; +"513 Slice_488" -> "515 Mul_490" [label="[]", style=solid]; +"514 Constant_489" -> "515 Mul_490" [label="[]", style=solid]; +"515 Mul_490" -> "534 Mul_509" [label="[]", style=solid]; +"516 Constant_491" -> "520 Slice_495" [label="[1]", style=dashed]; +"517 Constant_492" -> "520 Slice_495" [label="[1]", style=dashed]; +"518 Constant_493" -> "520 Slice_495" [label="[1]", style=dashed]; +"519 Constant_494" -> "520 Slice_495" [label="[1]", style=dashed]; +"520 Slice_495" -> "525 Slice_500" [label="[]", style=solid]; +"521 Constant_496" -> "525 Slice_500" [label="[1]", style=dashed]; +"522 Constant_497" -> "525 Slice_500" [label="[1]", style=dashed]; +"523 Constant_498" -> "525 Slice_500" [label="[1]", style=dashed]; +"524 Constant_499" -> "525 Slice_500" [label="[1]", style=dashed]; +"525 Slice_500" -> "530 Slice_505" [label="[]", style=solid]; +"526 Constant_501" -> "530 Slice_505" [label="[1]", style=dashed]; +"527 Constant_502" -> "530 Slice_505" [label="[1]", style=dashed]; +"528 Constant_503" -> "530 Slice_505" [label="[1]", style=dashed]; +"529 Constant_504" -> "530 Slice_505" [label="[1]", style=dashed]; +"530 Slice_505" -> "532 Mul_507" [label="[]", style=solid]; +"531 Constant_506" -> "532 Mul_507" [label="[]", style=solid]; +"532 Mul_507" -> "537 Exp_512" [label="[]", style=solid]; +"533 Constant_508" -> "534 Mul_509" [label="[1, 15130, 2]", style=solid]; +"534 Mul_509" -> "536 Add_511" [label="[]", style=solid]; +"535 Constant_510" -> "536 Add_511" [label="[1, 15130, 2]", style=solid]; +"536 Add_511" -> "544 Slice_519" [label="[]", style=solid]; +"536 Add_511" -> "581 Slice_554" [label="[]", style=solid]; +"536 Add_511" -> "618 Slice_589" [label="[]", style=solid]; +"536 Add_511" -> "655 Slice_624" [label="[]", style=solid]; +"537 Exp_512" -> "539 Mul_514" [label="[]", style=solid]; +"538 Constant_513" -> "539 Mul_514" [label="[1, 15130, 2]", style=solid]; +"539 Mul_514" -> "561 Slice_535" [label="[]", style=solid]; +"539 Mul_514" -> "598 Slice_570" [label="[]", style=solid]; +"539 Mul_514" -> "635 Slice_605" [label="[]", style=solid]; +"539 Mul_514" -> "672 Slice_640" [label="[]", style=solid]; +"540 Constant_515" -> "544 Slice_519" [label="[1]", style=dashed]; +"541 Constant_516" -> "544 Slice_519" [label="[1]", style=dashed]; +"542 
Constant_517" -> "544 Slice_519" [label="[1]", style=dashed]; +"543 Constant_518" -> "544 Slice_519" [label="[1]", style=dashed]; +"544 Slice_519" -> "549 Slice_524" [label="[]", style=solid]; +"545 Constant_520" -> "549 Slice_524" [label="[1]", style=dashed]; +"546 Constant_521" -> "549 Slice_524" [label="[1]", style=dashed]; +"547 Constant_522" -> "549 Slice_524" [label="[1]", style=dashed]; +"548 Constant_523" -> "549 Slice_524" [label="[1]", style=dashed]; +"549 Slice_524" -> "554 Slice_529" [label="[]", style=solid]; +"550 Constant_525" -> "554 Slice_529" [label="[1]", style=dashed]; +"551 Constant_526" -> "554 Slice_529" [label="[1]", style=dashed]; +"552 Constant_527" -> "554 Slice_529" [label="[1]", style=dashed]; +"553 Constant_528" -> "554 Slice_529" [label="[1]", style=dashed]; +"554 Slice_529" -> "556 Squeeze_530" [label="[]", style=solid]; +"555 Constant_nncf_347" -> "556 Squeeze_530" [label="[1]", style=dashed]; +"556 Squeeze_530" -> "576 Sub_549" [label="[]", style=solid]; +"557 Constant_531" -> "561 Slice_535" [label="[1]", style=dashed]; +"558 Constant_532" -> "561 Slice_535" [label="[1]", style=dashed]; +"559 Constant_533" -> "561 Slice_535" [label="[1]", style=dashed]; +"560 Constant_534" -> "561 Slice_535" [label="[1]", style=dashed]; +"561 Slice_535" -> "566 Slice_540" [label="[]", style=solid]; +"562 Constant_536" -> "566 Slice_540" [label="[1]", style=dashed]; +"563 Constant_537" -> "566 Slice_540" [label="[1]", style=dashed]; +"564 Constant_538" -> "566 Slice_540" [label="[1]", style=dashed]; +"565 Constant_539" -> "566 Slice_540" [label="[1]", style=dashed]; +"566 Slice_540" -> "571 Slice_545" [label="[]", style=solid]; +"567 Constant_541" -> "571 Slice_545" [label="[1]", style=dashed]; +"568 Constant_542" -> "571 Slice_545" [label="[1]", style=dashed]; +"569 Constant_543" -> "571 Slice_545" [label="[1]", style=dashed]; +"570 Constant_544" -> "571 Slice_545" [label="[1]", style=dashed]; +"571 Slice_545" -> "573 Squeeze_546" [label="[]", style=solid]; +"572 Constant_nncf_364" -> "573 Squeeze_546" [label="[1]", style=dashed]; +"573 Squeeze_546" -> "575 Mul_548" [label="[]", style=solid]; +"574 Constant_547" -> "575 Mul_548" [label="[]", style=solid]; +"575 Mul_548" -> "576 Sub_549" [label="[]", style=solid]; +"576 Sub_549" -> "689 Unsqueeze_655" [label="[]", style=solid]; +"577 Constant_550" -> "581 Slice_554" [label="[1]", style=dashed]; +"578 Constant_551" -> "581 Slice_554" [label="[1]", style=dashed]; +"579 Constant_552" -> "581 Slice_554" [label="[1]", style=dashed]; +"580 Constant_553" -> "581 Slice_554" [label="[1]", style=dashed]; +"581 Slice_554" -> "586 Slice_559" [label="[]", style=solid]; +"582 Constant_555" -> "586 Slice_559" [label="[1]", style=dashed]; +"583 Constant_556" -> "586 Slice_559" [label="[1]", style=dashed]; +"584 Constant_557" -> "586 Slice_559" [label="[1]", style=dashed]; +"585 Constant_558" -> "586 Slice_559" [label="[1]", style=dashed]; +"586 Slice_559" -> "591 Slice_564" [label="[]", style=solid]; +"587 Constant_560" -> "591 Slice_564" [label="[1]", style=dashed]; +"588 Constant_561" -> "591 Slice_564" [label="[1]", style=dashed]; +"589 Constant_562" -> "591 Slice_564" [label="[1]", style=dashed]; +"590 Constant_563" -> "591 Slice_564" [label="[1]", style=dashed]; +"591 Slice_564" -> "593 Squeeze_565" [label="[]", style=solid]; +"592 Constant_nncf_384" -> "593 Squeeze_565" [label="[1]", style=dashed]; +"593 Squeeze_565" -> "613 Sub_584" [label="[]", style=solid]; +"594 Constant_566" -> "598 Slice_570" [label="[1]", style=dashed]; +"595 
Constant_567" -> "598 Slice_570" [label="[1]", style=dashed]; +"596 Constant_568" -> "598 Slice_570" [label="[1]", style=dashed]; +"597 Constant_569" -> "598 Slice_570" [label="[1]", style=dashed]; +"598 Slice_570" -> "603 Slice_575" [label="[]", style=solid]; +"599 Constant_571" -> "603 Slice_575" [label="[1]", style=dashed]; +"600 Constant_572" -> "603 Slice_575" [label="[1]", style=dashed]; +"601 Constant_573" -> "603 Slice_575" [label="[1]", style=dashed]; +"602 Constant_574" -> "603 Slice_575" [label="[1]", style=dashed]; +"603 Slice_575" -> "608 Slice_580" [label="[]", style=solid]; +"604 Constant_576" -> "608 Slice_580" [label="[1]", style=dashed]; +"605 Constant_577" -> "608 Slice_580" [label="[1]", style=dashed]; +"606 Constant_578" -> "608 Slice_580" [label="[1]", style=dashed]; +"607 Constant_579" -> "608 Slice_580" [label="[1]", style=dashed]; +"608 Slice_580" -> "610 Squeeze_581" [label="[]", style=solid]; +"609 Constant_nncf_401" -> "610 Squeeze_581" [label="[1]", style=dashed]; +"610 Squeeze_581" -> "612 Mul_583" [label="[]", style=solid]; +"611 Constant_582" -> "612 Mul_583" [label="[]", style=solid]; +"612 Mul_583" -> "613 Sub_584" [label="[]", style=solid]; +"613 Sub_584" -> "691 Unsqueeze_656" [label="[]", style=solid]; +"614 Constant_585" -> "618 Slice_589" [label="[1]", style=dashed]; +"615 Constant_586" -> "618 Slice_589" [label="[1]", style=dashed]; +"616 Constant_587" -> "618 Slice_589" [label="[1]", style=dashed]; +"617 Constant_588" -> "618 Slice_589" [label="[1]", style=dashed]; +"618 Slice_589" -> "623 Slice_594" [label="[]", style=solid]; +"619 Constant_590" -> "623 Slice_594" [label="[1]", style=dashed]; +"620 Constant_591" -> "623 Slice_594" [label="[1]", style=dashed]; +"621 Constant_592" -> "623 Slice_594" [label="[1]", style=dashed]; +"622 Constant_593" -> "623 Slice_594" [label="[1]", style=dashed]; +"623 Slice_594" -> "628 Slice_599" [label="[]", style=solid]; +"624 Constant_595" -> "628 Slice_599" [label="[1]", style=dashed]; +"625 Constant_596" -> "628 Slice_599" [label="[1]", style=dashed]; +"626 Constant_597" -> "628 Slice_599" [label="[1]", style=dashed]; +"627 Constant_598" -> "628 Slice_599" [label="[1]", style=dashed]; +"628 Slice_599" -> "630 Squeeze_600" [label="[]", style=solid]; +"629 Constant_nncf_421" -> "630 Squeeze_600" [label="[1]", style=dashed]; +"630 Squeeze_600" -> "650 Add_619" [label="[]", style=solid]; +"631 Constant_601" -> "635 Slice_605" [label="[1]", style=dashed]; +"632 Constant_602" -> "635 Slice_605" [label="[1]", style=dashed]; +"633 Constant_603" -> "635 Slice_605" [label="[1]", style=dashed]; +"634 Constant_604" -> "635 Slice_605" [label="[1]", style=dashed]; +"635 Slice_605" -> "640 Slice_610" [label="[]", style=solid]; +"636 Constant_606" -> "640 Slice_610" [label="[1]", style=dashed]; +"637 Constant_607" -> "640 Slice_610" [label="[1]", style=dashed]; +"638 Constant_608" -> "640 Slice_610" [label="[1]", style=dashed]; +"639 Constant_609" -> "640 Slice_610" [label="[1]", style=dashed]; +"640 Slice_610" -> "645 Slice_615" [label="[]", style=solid]; +"641 Constant_611" -> "645 Slice_615" [label="[1]", style=dashed]; +"642 Constant_612" -> "645 Slice_615" [label="[1]", style=dashed]; +"643 Constant_613" -> "645 Slice_615" [label="[1]", style=dashed]; +"644 Constant_614" -> "645 Slice_615" [label="[1]", style=dashed]; +"645 Slice_615" -> "647 Squeeze_616" [label="[]", style=solid]; +"646 Constant_nncf_438" -> "647 Squeeze_616" [label="[1]", style=dashed]; +"647 Squeeze_616" -> "649 Mul_618" [label="[]", style=solid]; +"648 
Constant_617" -> "649 Mul_618" [label="[]", style=solid]; +"649 Mul_618" -> "650 Add_619" [label="[]", style=solid]; +"650 Add_619" -> "693 Unsqueeze_657" [label="[]", style=solid]; +"651 Constant_620" -> "655 Slice_624" [label="[1]", style=dashed]; +"652 Constant_621" -> "655 Slice_624" [label="[1]", style=dashed]; +"653 Constant_622" -> "655 Slice_624" [label="[1]", style=dashed]; +"654 Constant_623" -> "655 Slice_624" [label="[1]", style=dashed]; +"655 Slice_624" -> "660 Slice_629" [label="[]", style=solid]; +"656 Constant_625" -> "660 Slice_629" [label="[1]", style=dashed]; +"657 Constant_626" -> "660 Slice_629" [label="[1]", style=dashed]; +"658 Constant_627" -> "660 Slice_629" [label="[1]", style=dashed]; +"659 Constant_628" -> "660 Slice_629" [label="[1]", style=dashed]; +"660 Slice_629" -> "665 Slice_634" [label="[]", style=solid]; +"661 Constant_630" -> "665 Slice_634" [label="[1]", style=dashed]; +"662 Constant_631" -> "665 Slice_634" [label="[1]", style=dashed]; +"663 Constant_632" -> "665 Slice_634" [label="[1]", style=dashed]; +"664 Constant_633" -> "665 Slice_634" [label="[1]", style=dashed]; +"665 Slice_634" -> "667 Squeeze_635" [label="[]", style=solid]; +"666 Constant_nncf_458" -> "667 Squeeze_635" [label="[1]", style=dashed]; +"667 Squeeze_635" -> "687 Add_654" [label="[]", style=solid]; +"668 Constant_636" -> "672 Slice_640" [label="[1]", style=dashed]; +"669 Constant_637" -> "672 Slice_640" [label="[1]", style=dashed]; +"670 Constant_638" -> "672 Slice_640" [label="[1]", style=dashed]; +"671 Constant_639" -> "672 Slice_640" [label="[1]", style=dashed]; +"672 Slice_640" -> "677 Slice_645" [label="[]", style=solid]; +"673 Constant_641" -> "677 Slice_645" [label="[1]", style=dashed]; +"674 Constant_642" -> "677 Slice_645" [label="[1]", style=dashed]; +"675 Constant_643" -> "677 Slice_645" [label="[1]", style=dashed]; +"676 Constant_644" -> "677 Slice_645" [label="[1]", style=dashed]; +"677 Slice_645" -> "682 Slice_650" [label="[]", style=solid]; +"678 Constant_646" -> "682 Slice_650" [label="[1]", style=dashed]; +"679 Constant_647" -> "682 Slice_650" [label="[1]", style=dashed]; +"680 Constant_648" -> "682 Slice_650" [label="[1]", style=dashed]; +"681 Constant_649" -> "682 Slice_650" [label="[1]", style=dashed]; +"682 Slice_650" -> "684 Squeeze_651" [label="[]", style=solid]; +"683 Constant_nncf_475" -> "684 Squeeze_651" [label="[1]", style=dashed]; +"684 Squeeze_651" -> "686 Mul_653" [label="[]", style=solid]; +"685 Constant_652" -> "686 Mul_653" [label="[]", style=solid]; +"686 Mul_653" -> "687 Add_654" [label="[]", style=solid]; +"687 Add_654" -> "695 Unsqueeze_658" [label="[]", style=solid]; +"688 Constant_nncf_480" -> "689 Unsqueeze_655" [label="[1]", style=dashed]; +"689 Unsqueeze_655" -> "696 Concat_659" [label="[]", style=solid]; +"690 Constant_nncf_482" -> "691 Unsqueeze_656" [label="[1]", style=dashed]; +"691 Unsqueeze_656" -> "696 Concat_659" [label="[]", style=solid]; +"692 Constant_nncf_484" -> "693 Unsqueeze_657" [label="[1]", style=dashed]; +"693 Unsqueeze_657" -> "696 Concat_659" [label="[]", style=solid]; +"694 Constant_nncf_486" -> "695 Unsqueeze_658" [label="[1]", style=dashed]; +"695 Unsqueeze_658" -> "696 Concat_659" [label="[]", style=solid]; +"696 Concat_659" -> "723 NonMaxSuppression_683" [label="[]", style=solid]; +"696 Concat_659" -> "761 Squeeze_719" [label="[]", style=solid]; +"697 Shape_nncf_489" -> "700 Reshape_nncf_492" [label="[-1]", style=dashed]; +"698 Flatten_nncf_490" -> "699 Softmax_660" [label="[]", style=solid]; +"699 Softmax_660" -> 
"700 Reshape_nncf_492" [label="[]", style=solid]; +"700 Reshape_nncf_492" -> "701 Transpose_661" [label="[]", style=solid]; +"701 Transpose_661" -> "706 Slice_666" [label="[]", style=solid]; +"702 Constant_662" -> "706 Slice_666" [label="[1]", style=dashed]; +"703 Constant_663" -> "706 Slice_666" [label="[1]", style=dashed]; +"704 Constant_664" -> "706 Slice_666" [label="[1]", style=dashed]; +"705 Constant_665" -> "706 Slice_666" [label="[1]", style=dashed]; +"706 Slice_666" -> "711 Slice_671" [label="[]", style=solid]; +"707 Constant_667" -> "711 Slice_671" [label="[1]", style=dashed]; +"708 Constant_668" -> "711 Slice_671" [label="[1]", style=dashed]; +"709 Constant_669" -> "711 Slice_671" [label="[1]", style=dashed]; +"710 Constant_670" -> "711 Slice_671" [label="[1]", style=dashed]; +"711 Slice_671" -> "716 Slice_676" [label="[]", style=solid]; +"712 Constant_672" -> "716 Slice_676" [label="[1]", style=dashed]; +"713 Constant_673" -> "716 Slice_676" [label="[1]", style=dashed]; +"714 Constant_674" -> "716 Slice_676" [label="[1]", style=dashed]; +"715 Constant_675" -> "716 Slice_676" [label="[1]", style=dashed]; +"716 Slice_676" -> "723 NonMaxSuppression_683" [label="[]", style=solid]; +"716 Slice_676" -> "739 Reshape_699" [label="[]", style=solid]; +"716 Slice_676" -> "740 Shape_700" [label="[]", style=solid]; +"717 Constant_677" -> "718 ConstantOfShape_678" [label="[1]", style=dashed]; +"718 ConstantOfShape_678" -> "723 NonMaxSuppression_683" [label="[1]", style=dashed]; +"719 Constant_679" -> "720 ConstantOfShape_680" [label="[1]", style=dashed]; +"720 ConstantOfShape_680" -> "723 NonMaxSuppression_683" [label="[1]", style=solid]; +"721 Constant_681" -> "722 ConstantOfShape_682" [label="[1]", style=dashed]; +"722 ConstantOfShape_682" -> "723 NonMaxSuppression_683" [label="[1]", style=solid]; +"723 NonMaxSuppression_683" -> "728 Slice_688" [label="[-1, 3]", style=dashed]; +"723 NonMaxSuppression_683" -> "735 Slice_695" [label="[-1, 3]", style=dashed]; +"724 Constant_684" -> "728 Slice_688" [label="[1]", style=dashed]; +"725 Constant_685" -> "728 Slice_688" [label="[1]", style=dashed]; +"726 Constant_686" -> "728 Slice_688" [label="[1]", style=dashed]; +"727 Constant_687" -> "728 Slice_688" [label="[1]", style=dashed]; +"728 Slice_688" -> "730 Gather_690" [label="[-1, 3]", style=dashed]; +"729 Constant_689" -> "730 Gather_690" [label="[]", style=dashed]; +"730 Gather_690" -> "743 Mul_703" [label="[-1]", style=dashed]; +"730 Gather_690" -> "772 Gather_729" [label="[-1]", style=dashed]; +"731 Constant_691" -> "735 Slice_695" [label="[1]", style=dashed]; +"732 Constant_692" -> "735 Slice_695" [label="[1]", style=dashed]; +"733 Constant_693" -> "735 Slice_695" [label="[1]", style=dashed]; +"734 Constant_694" -> "735 Slice_695" [label="[1]", style=dashed]; +"735 Slice_695" -> "737 Gather_697" [label="[-1, 3]", style=dashed]; +"736 Constant_696" -> "737 Gather_697" [label="[]", style=dashed]; +"737 Gather_697" -> "744 Add_704" [label="[-1]", style=dashed]; +"737 Gather_697" -> "762 Gather_720" [label="[-1]", style=dashed]; +"738 Constant_698" -> "739 Reshape_699" [label="[1]", style=dashed]; +"739 Reshape_699" -> "746 Gather_706" [label="[-1]", style=solid]; +"740 Shape_700" -> "742 Gather_702" [label="[-1]", style=dashed]; +"741 Constant_701" -> "742 Gather_702" [label="[]", style=dashed]; +"742 Gather_702" -> "743 Mul_703" [label="[]", style=dashed]; +"743 Mul_703" -> "744 Add_704" [label="[-1]", style=dashed]; +"744 Add_704" -> "745 Cast_705" [label="[-1]", style=dashed]; +"745 Cast_705" 
-> "746 Gather_706" [label="[-1]", style=dashed]; +"746 Gather_706" -> "747 Shape_707" [label="[-1]", style=solid]; +"746 Gather_706" -> "759 TopK_717" [label="[-1]", style=solid]; +"746 Gather_706" -> "777 Gather_733" [label="[-1]", style=solid]; +"747 Shape_707" -> "749 Gather_709" [label="[1]", style=dashed]; +"748 Constant_708" -> "749 Gather_709" [label="[]", style=dashed]; +"749 Gather_709" -> "751 Unsqueeze_710" [label="[]", style=dashed]; +"750 Constant_nncf_542" -> "751 Unsqueeze_710" [label="[1]", style=dashed]; +"751 Unsqueeze_710" -> "753 Concat_712" [label="[1]", style=dashed]; +"752 Constant_711" -> "753 Concat_712" [label="[1]", style=dashed]; +"753 Concat_712" -> "754 Cast_713" [label="[2]", style=dashed]; +"754 Cast_713" -> "755 ReduceMin_714" [label="[2]", style=dashed]; +"755 ReduceMin_714" -> "756 Cast_715" [label="[]", style=dashed]; +"756 Cast_715" -> "758 Unsqueeze_716" [label="[]", style=dashed]; +"757 Constant_nncf_549" -> "758 Unsqueeze_716" [label="[1]", style=dashed]; +"758 Unsqueeze_716" -> "759 TopK_717" [label="[1]", style=dashed]; +"759 TopK_717" -> "762 Gather_720" [label="[-1]", style=dashed]; +"759 TopK_717" -> "772 Gather_729" [label="[-1]", style=dashed]; +"759 TopK_717" -> "777 Gather_733" [label="[-1]", style=dashed]; +"760 Constant_nncf_552" -> "761 Squeeze_719" [label="[1]", style=dashed]; +"761 Squeeze_719" -> "767 Slice_725" [label="[]", style=solid]; +"762 Gather_720" -> "768 Cast_726" [label="[-1]", style=dashed]; +"763 Constant_721" -> "767 Slice_725" [label="[1]", style=dashed]; +"764 Constant_722" -> "767 Slice_725" [label="[1]", style=dashed]; +"765 Constant_723" -> "767 Slice_725" [label="[1]", style=dashed]; +"766 Constant_724" -> "767 Slice_725" [label="[1]", style=dashed]; +"767 Slice_725" -> "769 Gather_727" [label="[]", style=solid]; +"768 Cast_726" -> "769 Gather_727" [label="[-1]", style=dashed]; +"769 Gather_727" -> "771 Unsqueeze_bboxes" [label="[]", style=solid]; +"770 Constant_nncf_562" -> "771 Unsqueeze_bboxes" [label="[1]", style=dashed]; +"771 Unsqueeze_bboxes" -> "781 nncf_model_output_0" [label="[1, -1, 4]", style=solid]; +"772 Gather_729" -> "774 Unsqueeze_730" [label="[-1]", style=dashed]; +"773 Constant_nncf_565" -> "774 Unsqueeze_730" [label="[1]", style=dashed]; +"774 Unsqueeze_730" -> "776 Add_labels" [label="[1, -1]", style=dashed]; +"775 Constant_731" -> "776 Add_labels" [label="[]", style=dashed]; +"776 Add_labels" -> "782 nncf_model_output_1" [label="[1, -1]", style=dashed]; +"777 Gather_733" -> "779 Unsqueeze_scores" [label="[-1]", style=solid]; +"778 Constant_nncf_570" -> "779 Unsqueeze_scores" [label="[1]", style=dashed]; +"779 Unsqueeze_scores" -> "783 nncf_model_output_2" [label="[1, -1]", style=solid]; +"780 nncf_model_input_0" -> "0 QuantizeLinear_image_1" [label="[1, 3, 1200, 1200]", style=solid]; } diff --git a/tests/onnx/data/reference_graphs/quantization/synthetic/shape_of_model.dot b/tests/onnx/data/reference_graphs/quantization/synthetic/shape_of_model.dot index 5423d415b6b..2ac000f9eaa 100644 --- a/tests/onnx/data/reference_graphs/quantization/synthetic/shape_of_model.dot +++ b/tests/onnx/data/reference_graphs/quantization/synthetic/shape_of_model.dot @@ -32,7 +32,7 @@ strict digraph { "10 Cast1" -> "11 Cast2" [label="[]", style=dashed]; "11 Cast2" -> "12 Sqrt" [label="[]", style=solid]; "12 Sqrt" -> "13 Reshape" [label="[]", style=solid]; -"13 Reshape" -> "16 Conv2" [label="[1]", style=solid]; +"13 Reshape" -> "16 Conv2" [label="[]", style=solid]; "14 QuantizeLinear_Conv2_W_1" -> "15 
DequantizeLinear_Conv2_W_1" [label="[10, 32, 1, 1]", style=dashed]; "15 DequantizeLinear_Conv2_W_1" -> "16 Conv2" [label="[10, 32, 1, 1]", style=solid]; "16 Conv2" -> "18 nncf_model_output_0" [label="[1, 10, 1, 1]", style=solid]; diff --git a/tests/onnx/data/reference_scales/linear_model_mixed.json b/tests/onnx/data/reference_scales/linear_model_mixed.json index 2cbfd2a51d8..8f2c40e5ffb 100644 --- a/tests/onnx/data/reference_scales/linear_model_mixed.json +++ b/tests/onnx/data/reference_scales/linear_model_mixed.json @@ -1,6 +1,6 @@ { "QuantizeLinear_X_1": { - "scale": 0.00786584708839655, + "scale": 0.007865846157073975, "zero_point": -1 }, "QuantizeLinear_Conv1_W_1": { diff --git a/tests/onnx/data/reference_scales/one_depthwise_convolutional_model_mixed.json b/tests/onnx/data/reference_scales/one_depthwise_convolutional_model_mixed.json index a4a2295cf2e..b062a9b0ebe 100644 --- a/tests/onnx/data/reference_scales/one_depthwise_convolutional_model_mixed.json +++ b/tests/onnx/data/reference_scales/one_depthwise_convolutional_model_mixed.json @@ -1,7 +1,7 @@ { "QuantizeLinear_X_1": { "scale": [ - 0.007609957829117775, + 0.007609957363456488, 0.007633729372173548, 0.007594745140522718 ], diff --git a/tests/onnx/models.py b/tests/onnx/models.py index 98a43bfe984..0f8c5f2adc9 100644 --- a/tests/onnx/models.py +++ b/tests/onnx/models.py @@ -33,6 +33,7 @@ def create_initializer_tensor( class ONNXReferenceModel: def __init__(self, onnx_model, input_shape: List[List[int]], graph_path): self.onnx_model = onnx_model + self.onnx_model.ir_version = 9 self.input_shape = input_shape self.path_ref_graph = graph_path diff --git a/tests/onnx/quantization/test_bias_correction.py b/tests/onnx/quantization/test_bias_correction.py index 948519ad39d..2fef33e00b3 100644 --- a/tests/onnx/quantization/test_bias_correction.py +++ b/tests/onnx/quantization/test_bias_correction.py @@ -212,7 +212,7 @@ def test__get_subgraph_data_for_node(self, quantized_test_model, layer_name, ref MultipleConvTestModel, { ("/conv_1/Conv", 0): ("nncf_model_input_0", 0), - ("/conv_3/Conv", 0): ("nncf_model_input_0", 1), + ("/conv_3/Conv", 0): ("nncf_model_input_0", 0), }, ), (ConvTestModel, {("/conv/Conv", 0): ("nncf_model_input_0", 0)}), diff --git a/tests/onnx/quantization/test_min_max.py b/tests/onnx/quantization/test_min_max.py index 53a6700f3a7..75ce070bef4 100644 --- a/tests/onnx/quantization/test_min_max.py +++ b/tests/onnx/quantization/test_min_max.py @@ -8,215 +8,74 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from dataclasses import dataclass -from typing import List +from typing import Tuple import pytest -import nncf.onnx.graph.metatypes.onnx_metatypes as om -from nncf.common.graph.graph import NNCFNode +from nncf.common.graph.graph import NNCFGraph from nncf.common.graph.transformations.commands import TargetType +from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXConvolutionMetatype +from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXDepthwiseConvolutionMetatype +from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXGemmMetatype from nncf.onnx.graph.nncf_graph_builder import ONNXLayerAttributes -from nncf.onnx.graph.node_utils import get_quantization_axis -from nncf.onnx.graph.node_utils import get_reduction_shape from nncf.onnx.graph.transformations.commands import ONNXTargetPoint +from nncf.quantization.algorithms.min_max.backend import MinMaxAlgoBackend +from nncf.quantization.algorithms.min_max.onnx_backend import ONNXMinMaxAlgoBackend +from tests.post_training.test_templates.models import NNCFGraphToTest +from tests.post_training.test_templates.test_min_max import TemplateTestGetChannelAxes +from tests.post_training.test_templates.test_min_max import TemplateTestGetTargetPointShape +from tests.post_training.test_templates.test_min_max import TemplateTestMinMaxAlgorithm -@dataclass -class TestCase: - nncf_node: NNCFNode - target_point: ONNXTargetPoint - per_channel: bool - ref_reduction_shape: List[int] - - -test_cases = ( - TestCase( - nncf_node=NNCFNode( - { - NNCFNode.ID_NODE_ATTR: 0, - NNCFNode.NODE_NAME_ATTR: "conv_with_weight_per_tensor", - NNCFNode.METATYPE_ATTR: om.ONNXConvolutionMetatype, - NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes(weight_attrs={1: {"shape": [3, 5, 8]}}), - } - ), - target_point=ONNXTargetPoint( - target_type=TargetType.OPERATION_WITH_WEIGHTS, - target_node_name="conv_with_weight_per_tensor", - port_id=1, - ), - per_channel=False, - ref_reduction_shape=None, - ), - TestCase( - nncf_node=NNCFNode( - { - NNCFNode.ID_NODE_ATTR: 0, - NNCFNode.NODE_NAME_ATTR: "conv_with_weight_per_channel", - NNCFNode.METATYPE_ATTR: om.ONNXConvolutionMetatype, - NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes(weight_attrs={1: {"shape": [3, 5, 8]}}), - } - ), - target_point=ONNXTargetPoint( - target_type=TargetType.OPERATION_WITH_WEIGHTS, - target_node_name="gemm_with_weight_per_channel_0_port", - port_id=1, - ), - per_channel=True, - ref_reduction_shape=(1, 2), - ), - TestCase( - nncf_node=NNCFNode( - { - NNCFNode.ID_NODE_ATTR: 0, - NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_tensor", - NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype, - NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes(weight_attrs={1: {"shape": [5, 8]}}), - } - ), - target_point=ONNXTargetPoint( - target_type=TargetType.OPERATION_WITH_WEIGHTS, - target_node_name="gemm_with_weight_per_tensor", - port_id=1, - ), - per_channel=False, - ref_reduction_shape=None, - ), - TestCase( - nncf_node=NNCFNode( - { - NNCFNode.ID_NODE_ATTR: 0, - NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_channel", - NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype, - NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes(weight_attrs={1: {"shape": [5, 8]}}), - } - ), - target_point=ONNXTargetPoint( - target_type=TargetType.OPERATION_WITH_WEIGHTS, - target_node_name="gemm_with_weight_per_channel_0_port", - port_id=1, - ), - per_channel=True, - ref_reduction_shape=(0,), - ), - TestCase( - nncf_node=NNCFNode( - { - NNCFNode.ID_NODE_ATTR: 0, - NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_channel_extra_attrs", - NNCFNode.METATYPE_ATTR: 
om.ONNXGemmMetatype, - NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes( - weight_attrs={1: {"shape": [5, 8]}}, node_attrs={"transA": 0, "transB": 0} - ), - } - ), - target_point=ONNXTargetPoint( - target_type=TargetType.OPERATION_WITH_WEIGHTS, - target_node_name="gemm_with_weight_per_channel_extra_attrs", - port_id=1, - ), - per_channel=True, - ref_reduction_shape=(0,), - ), - TestCase( - nncf_node=NNCFNode( - { - NNCFNode.ID_NODE_ATTR: 0, - NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_channel_extra_attrs", - NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype, - NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes( - weight_attrs={1: {"shape": [5, 8]}}, node_attrs={"transA": 1, "transB": 0} - ), - } - ), - target_point=ONNXTargetPoint( - target_type=TargetType.OPERATION_WITH_WEIGHTS, - target_node_name="gemm_with_weight_per_channel_extra_attrs", - port_id=1, - ), - per_channel=True, - ref_reduction_shape=(0,), - ), - TestCase( - nncf_node=NNCFNode( - { - NNCFNode.ID_NODE_ATTR: 0, - NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_channel_transpose", - NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype, - NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes( - weight_attrs={1: {"shape": [5, 8]}}, node_attrs={"transA": 0, "transB": 1} - ), - } - ), - target_point=ONNXTargetPoint( - target_type=TargetType.OPERATION_WITH_WEIGHTS, - target_node_name="gemm_with_weight_per_channel_transpose", - port_id=1, - ), - per_channel=True, - ref_reduction_shape=(1,), - ), - TestCase( - nncf_node=NNCFNode( - { - NNCFNode.ID_NODE_ATTR: 0, - NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_channel_transpose_one_dim", - NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype, - NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes( - weight_attrs={1: {"shape": [5]}}, node_attrs={"transA": 0, "transB": 1} - ), - } - ), - target_point=ONNXTargetPoint( - target_type=TargetType.OPERATION_WITH_WEIGHTS, - target_node_name="gemm_with_weight_per_channel_0_port", - port_id=1, - ), - per_channel=True, - ref_reduction_shape=(0,), - ), - TestCase( - nncf_node=NNCFNode( - { - NNCFNode.ID_NODE_ATTR: 0, - NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_channel_0_port", - NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype, - NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes( - weight_attrs={0: {"shape": [10, 10, 5]}}, node_attrs={"transA": 0, "transB": 1} - ), - } - ), - target_point=ONNXTargetPoint( - target_type=TargetType.OPERATION_WITH_WEIGHTS, - target_node_name="gemm_with_weight_per_channel_0_port", - port_id=0, - ), - per_channel=True, - ref_reduction_shape=(0, 1), - ), -) - - -@pytest.mark.parametrize( - "test_case", - (test_cases), - ids=[test_case.nncf_node.node_name for test_case in test_cases], -) -def test_get_reduction_shape(test_case): - """Checks the correct return reduction shape in ONNXMinMaxAlgo. - Edge cases: - 1) per-tensor. - 2) transpose axis of GEMM node. - 3) one dimensional weight tensor. 
- """ - quantization_axis = get_quantization_axis( - is_per_channel=test_case.per_channel, node=test_case.nncf_node, target_point=test_case.target_point - ) - if quantization_axis is not None: # Per-Channel - reduction_shape = get_reduction_shape( - test_case.nncf_node.layer_attributes.weight_attrs[test_case.target_point.port_id]["shape"], - quantization_axis, - ) - assert reduction_shape == test_case.ref_reduction_shape - else: - assert not test_case.per_channel +class TestONNXMinMaxAlgorithm(TemplateTestMinMaxAlgorithm): + @property + def backend(self) -> MinMaxAlgoBackend: + return ONNXMinMaxAlgoBackend + + @property + def conv_metatype(self): + return ONNXConvolutionMetatype + + def create_target_point(self, target_point_type: TargetType, name: str, port_id: int) -> ONNXTargetPoint: + return ONNXTargetPoint(target_point_type, name, port_id) + + +class TestONNXGetTargetPointShape(TemplateTestGetTargetPointShape, TestONNXMinMaxAlgorithm): + def get_nncf_graph(self, weight_port_id: int, weight_shape: Tuple[int]) -> NNCFGraph: + conv_layer_attrs = ONNXLayerAttributes(weight_attrs={weight_port_id: {"shape": weight_shape}}, bias_attrs={}) + return NNCFGraphToTest(ONNXConvolutionMetatype, conv_layer_attrs).nncf_graph + + +class TestONNXGetChannelAxesMinMaxAlgorithm(TemplateTestGetChannelAxes, TestONNXMinMaxAlgorithm): + @property + def depthwiseconv_metatype(self): + return ONNXDepthwiseConvolutionMetatype + + @property + def matmul_metatype(self): + return ONNXGemmMetatype + + @staticmethod + def get_conv_node_attrs(weight_port_id: int, weight_shape: Tuple[int]) -> ONNXLayerAttributes: + return ONNXLayerAttributes(weight_attrs={weight_port_id: {"shape": weight_shape}}, bias_attrs={}) + + @staticmethod + def get_depthwiseconv_node_attrs(weight_port_id: int, weight_shape: Tuple[int]) -> ONNXLayerAttributes: + return TestONNXGetChannelAxesMinMaxAlgorithm.get_conv_node_attrs(weight_port_id, weight_shape) + + @staticmethod + def get_matmul_node_attrs( + weight_port_id: int, transpose_weight: Tuple[int], weight_shape: Tuple[int] + ) -> ONNXLayerAttributes: + weight_attrs = {weight_port_id: {"name": "dummy", "shape": weight_shape}} + if weight_port_id == 0: + gemm_attrs = {"transA": int(transpose_weight), "transB": 0} + elif weight_port_id == 1: + gemm_attrs = {"transA": 0, "transB": int(transpose_weight)} + return ONNXLayerAttributes(weight_attrs=weight_attrs, node_attrs=gemm_attrs) + + def test_get_channel_axes_deptwiseconv_node_ov(self): + pytest.skip("Test is not applied for ONNX backend.") + + def test_get_channel_axes_matmul_torch(self): + pytest.skip("Test is not applied for ONNX backend.") diff --git a/tests/onnx/quantization/test_ptq_params.py b/tests/onnx/quantization/test_ptq_params.py index daadc8a8337..f6c6c041459 100644 --- a/tests/onnx/quantization/test_ptq_params.py +++ b/tests/onnx/quantization/test_ptq_params.py @@ -9,22 +9,28 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import numpy as np import pytest +from nncf.common.graph.graph import NNCFGraph from nncf.common.graph.patterns import GraphPattern from nncf.common.graph.patterns.manager import PatternsManager from nncf.common.graph.transformations.commands import TargetType +from nncf.common.graph.transformations.commands import TransformationType from nncf.common.utils.backend import BackendType +from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXConcatMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXConvolutionMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXGemmMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXSoftmaxMetatype from nncf.onnx.graph.nncf_graph_builder import GraphConverter from nncf.onnx.graph.nncf_graph_builder import ONNXLayerAttributes +from nncf.onnx.graph.transformations.commands import ONNXQuantizerInsertionCommand from nncf.onnx.graph.transformations.commands import ONNXTargetPoint from nncf.parameters import TargetDevice from nncf.quantization.algorithms.min_max.algorithm import MinMaxQuantization from nncf.quantization.algorithms.min_max.onnx_backend import ONNXMinMaxAlgoBackend from nncf.scopes import IgnoredScope +from tests.common.quantization.metatypes import CatTestMetatype from tests.common.quantization.metatypes import Conv2dTestMetatype from tests.common.quantization.metatypes import LinearTestMetatype from tests.common.quantization.metatypes import SoftmaxTestMetatype @@ -61,17 +67,34 @@ def check_quantize_outputs_fq_num(self, quantize_outputs, act_num_q, weight_num_ assert act_num_q == 1 assert weight_num_q == 1 + def check_unified_scale_layout(self, layout, unified_scale_group): + assert len(layout.transformations) == len(unified_scale_group) + for t, ref_tp in zip(layout.transformations, unified_scale_group): + assert isinstance(t, ONNXQuantizerInsertionCommand) + assert t.target_point == ref_tp + assert t.type == TransformationType.INSERT + assert t.quantizer_parameters.zero_point == 0 + assert np.isclose(t.quantizer_parameters.scale, 0.03149606) + def target_point(self, target_type: TargetType, target_node_name: str, port_id: int) -> ONNXTargetPoint: return ONNXTargetPoint(target_type, target_node_name, port_id) + def get_backend_tensor(self, value): + return np.array(value) + @property def metatypes_mapping(self): return { Conv2dTestMetatype: ONNXConvolutionMetatype, LinearTestMetatype: ONNXGemmMetatype, SoftmaxTestMetatype: ONNXSoftmaxMetatype, + CatTestMetatype: ONNXConcatMetatype, } + @property + def nncf_graph_cls(self): + return NNCFGraph + @pytest.fixture(scope="session") def test_params(self): linear_model = LinearModel().onnx_model diff --git a/tests/onnx/quantization/test_quantizer_config.py b/tests/onnx/quantization/test_quantizer_config.py index dd778d853ab..947006a1fc7 100644 --- a/tests/onnx/quantization/test_quantizer_config.py +++ b/tests/onnx/quantization/test_quantizer_config.py @@ -11,7 +11,6 @@ import pytest -from nncf.common.graph.transformations.commands import TargetType from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXAddLayerMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXConvolutionMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXDepthwiseConvolutionMetatype @@ -22,26 +21,11 @@ from tests.post_training.test_templates.models import NNCFGraphToTestSumAggregation from tests.post_training.test_templates.test_quantizer_config import TemplateTestQuantizerConfig -ParamsCls = TemplateTestQuantizerConfig.TestGetStatisticsCollectorParameters - class 
TestQuantizerConfig(TemplateTestQuantizerConfig): def get_algo_backend(self): return ONNXMinMaxAlgoBackend() - @pytest.fixture( - params=[ - pytest.param( - (TargetType.PRE_LAYER_OPERATION, "/Sum_1_0", (0, 2), (0, 1, 2)), - marks=pytest.mark.skip("Ticket 102414: remove hardcoded axes for activations"), - ), - (TargetType.POST_LAYER_OPERATION, "/Conv_1_0", (0, 2, 3), None), - (TargetType.OPERATION_WITH_WEIGHTS, "/Conv_1_0", (1, 2, 3), None), - ] - ) - def statistic_collector_parameters(self, request) -> ParamsCls: - return ParamsCls(*request.param) - @pytest.fixture def single_conv_nncf_graph(self) -> NNCFGraphToTest: conv_layer_attrs = ONNXLayerAttributes(weight_attrs={1: {"shape": [4, 4, 4, 4]}}, bias_attrs={}) diff --git a/tests/onnx/quantization/test_tensor_collector_batch_size.py b/tests/onnx/quantization/test_tensor_collector_batch_size.py new file mode 100644 index 00000000000..6b9a13addcb --- /dev/null +++ b/tests/onnx/quantization/test_tensor_collector_batch_size.py @@ -0,0 +1,50 @@ +# Copyright (c) 2024 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import numpy as np +import pytest + +from nncf.experimental.common.tensor_statistics.collectors import AGGREGATORS_MAP +from nncf.onnx.statistics.collectors import ONNX_REDUCERS_MAP +from nncf.onnx.statistics.collectors import ONNXNNCFCollectorTensorProcessor +from nncf.onnx.statistics.statistics import ONNXMinMaxTensorStatistic +from nncf.onnx.tensor import ONNXNNCFTensor +from tests.common.experimental.test_tensor_collector_batch_size import TemplateTestTensorCollectorBatchSize + + +class TestTensorCollectorBatchSize(TemplateTestTensorCollectorBatchSize): + @staticmethod + def get_tensor_statistics_class(): + return ONNXMinMaxTensorStatistic + + @staticmethod + def get_tensor_processor(): + return ONNXNNCFCollectorTensorProcessor() + + @staticmethod + def get_nncf_tensor_class(): + return ONNXNNCFTensor + + @pytest.fixture(params=ONNX_REDUCERS_MAP.values()) + def reducers(self, request) -> bool: + return request.param + + @pytest.fixture(params=AGGREGATORS_MAP.values()) + def aggregators(self, request) -> bool: + return request.param + + @pytest.fixture(params=[False]) + def inplace(self, request): + return request.param + + @staticmethod + def to_backend_tensor(tensor: np.ndarray): + return tensor diff --git a/tests/onnx/test_metatypes.py b/tests/onnx/test_metatypes.py index 7017fff61d8..f5ccaca73c9 100644 --- a/tests/onnx/test_metatypes.py +++ b/tests/onnx/test_metatypes.py @@ -20,7 +20,9 @@ from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXConstantOfShapeMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXConvolutionMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXDepthwiseConvolutionMetatype +from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXDequantizeLinearMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXGlobalAveragePoolMetatype +from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXQuantizeLinearMetatype from nncf.onnx.graph.metatypes.onnx_metatypes 
import ONNXReluMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXShapeMetatype from nncf.onnx.graph.nncf_graph_builder import GraphConverter @@ -28,6 +30,7 @@ from tests.onnx.models import ModelWithIntEdges from tests.onnx.models import MultiInputOutputModel from tests.onnx.models import OneDepthwiseConvolutionalModel +from tests.onnx.quantization.common import min_max_quantize_model TEST_MODELS = [LinearModel, MultiInputOutputModel, ModelWithIntEdges, OneDepthwiseConvolutionalModel] REF_METATYPES_COUNTERS = [ @@ -52,11 +55,45 @@ [InputNoopMetatype, ONNXConstantOfShapeMetatype, ONNXShapeMetatype, OutputNoopMetatype], [InputNoopMetatype, ONNXDepthwiseConvolutionMetatype, OutputNoopMetatype], ] +QUANTIZED_REF_METATYPES_COUNTERS = [ + REF_METATYPES_COUNTERS[0] + + [ + ONNXQuantizeLinearMetatype, + ONNXDequantizeLinearMetatype, + ] + * 5, + REF_METATYPES_COUNTERS[1] + + [ + ONNXQuantizeLinearMetatype, + ONNXDequantizeLinearMetatype, + ] + * 2, + REF_METATYPES_COUNTERS[2] + + [ + ONNXQuantizeLinearMetatype, + ONNXDequantizeLinearMetatype, + ] + * 0, + REF_METATYPES_COUNTERS[3] + + [ + ONNXQuantizeLinearMetatype, + ONNXDequantizeLinearMetatype, + ] + * 2, +] + +@pytest.mark.parametrize( + ("model_creator_func, ref_metatypes, q_ref_metatypes"), + zip(TEST_MODELS, REF_METATYPES_COUNTERS, QUANTIZED_REF_METATYPES_COUNTERS), +) +def test_mapping_onnx_metatypes(model_creator_func, ref_metatypes, q_ref_metatypes): + def _check_metatypes(model, ref_metatypes): + nncf_graph = GraphConverter.create_nncf_graph(model) + actual_metatypes = [node.metatype for node in nncf_graph.get_all_nodes()] + assert Counter(ref_metatypes) == Counter(actual_metatypes) -@pytest.mark.parametrize(("model_creator_func, ref_metatypes"), zip(TEST_MODELS, REF_METATYPES_COUNTERS)) -def test_mapping_onnx_metatypes(model_creator_func, ref_metatypes): - model = model_creator_func() - nncf_graph = GraphConverter.create_nncf_graph(model.onnx_model) - actual_metatypes = [node.metatype for node in nncf_graph.get_all_nodes()] - assert Counter(ref_metatypes) == Counter(actual_metatypes) + model = model_creator_func().onnx_model + q_model = min_max_quantize_model(model) + _check_metatypes(model, ref_metatypes) + _check_metatypes(q_model, q_ref_metatypes) diff --git a/tests/onnx/test_node_utils.py b/tests/onnx/test_node_utils.py index 09ebfe7de9f..d73bd002acd 100644 --- a/tests/onnx/test_node_utils.py +++ b/tests/onnx/test_node_utils.py @@ -32,13 +32,11 @@ def test_get_bias_value(model): @pytest.mark.parametrize( "shape, axis, expected_channel_axis", [ - ((1, 3, 5, 5), -1, 0), + ((1, 3, 5, 5), 3, 0), ((1, 3, 5, 5), 1, 2), ((1, 3, 5, 5), 0, 3), ((1, 3, 5, 5), 2, 1), - ((1, 3, 5, 5), -2, 1), - ((1,), -1, 0), - ((1, 1), -1, 0), + ((1,), 0, 0), ((1, 1), 1, 0), ((1, 1), 0, 1), ], diff --git a/tests/onnx/test_weightless_model.py b/tests/onnx/test_weightless_model.py index 7dec68d87be..04968c875f9 100644 --- a/tests/onnx/test_weightless_model.py +++ b/tests/onnx/test_weightless_model.py @@ -9,7 +9,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import tempfile from pathlib import Path import onnx @@ -30,7 +29,6 @@ def test_save_weightless_model(tmp_path, model_to_test, model): torch.onnx.export(model, x, onnx_model_path) onnx_model = onnx.load_model(onnx_model_path) - with tempfile.TemporaryDirectory() as tmpdir: - weightless_model_path = tmpdir / Path("weightless_model.onnx") - save_model_without_tensors(onnx_model, weightless_model_path) - assert weightless_model_path.stat().st_size < Path(onnx_model_path).stat().st_size + weightless_model_path = tmp_path / Path("weightless_model.onnx") + save_model_without_tensors(onnx_model, weightless_model_path) + assert weightless_model_path.stat().st_size < Path(onnx_model_path).stat().st_size diff --git a/tests/onnx/weightless_model.py b/tests/onnx/weightless_model.py index 73d0bf9252e..f051aff3f03 100644 --- a/tests/onnx/weightless_model.py +++ b/tests/onnx/weightless_model.py @@ -9,7 +9,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import tempfile from copy import deepcopy from pathlib import Path from typing import Union @@ -49,5 +48,5 @@ def save_model_without_tensors(model: onnx.ModelProto, model_path: Path) -> None """ tensors_location = Path("tensors") copy_model = deepcopy(model) - with tempfile.TemporaryDirectory() as tmpfile: - onnx.save_model(copy_model, model_path, save_as_external_data=True, location=Path(tmpfile) / tensors_location) + onnx.save_model(copy_model, model_path, save_as_external_data=True, location=tensors_location) + (model_path.parent / tensors_location).unlink() diff --git a/tests/openvino/native/data/2024.1/reference_scales/yolo-v3-tiny-onnx_mixed.json b/tests/openvino/native/data/2024.1/reference_scales/yolo-v3-tiny-onnx_mixed.json index adda1213e25..b9d7b247efd 100644 --- a/tests/openvino/native/data/2024.1/reference_scales/yolo-v3-tiny-onnx_mixed.json +++ b/tests/openvino/native/data/2024.1/reference_scales/yolo-v3-tiny-onnx_mixed.json @@ -7155,7 +7155,7 @@ "output_low": -0.3303394019603729, "output_high": 3.881488084793091 }, - "Multiply_6981/fq_weights_1": { + "Multiply_6953/fq_weights_1": { "input_low": [ [ [ @@ -14339,7 +14339,7 @@ "output_low": -1.0861154794692993, "output_high": 7.848060131072998 }, - "Multiply_6939/fq_weights_1": { + "Multiply_6911/fq_weights_1": { "input_low": [ [ [ @@ -21523,7 +21523,7 @@ "output_low": -1.0673936605453491, "output_high": 13.258152961730957 }, - "Multiply_6932/fq_weights_1": { + "Multiply_6904/fq_weights_1": { "input_low": [ [ [ @@ -25123,7 +25123,7 @@ "output_low": -0.6687189340591431, "output_high": 11.511518478393555 }, - "Multiply_6925/fq_weights_1": { + "Multiply_6897/fq_weights_1": { "input_low": [ [ [ @@ -26931,7 +26931,7 @@ "output_low": -0.5482831001281738, "output_high": 6.109440326690674 }, - "Multiply_6918/fq_weights_1": { + "Multiply_6890/fq_weights_1": { "input_low": [ [ [ @@ -27843,7 +27843,7 @@ "output_low": -1.5592432022094727, "output_high": 3.473757028579712 }, - "Multiply_6911/fq_weights_1": { + "Multiply_6883/fq_weights_1": { "input_low": [ [ [ @@ -28319,7 +28319,7 @@ "output_low": -0.2852116525173187, "output_high": 3.020650625228882 }, - "Multiply_6974/fq_weights_1": { + "Multiply_6946/fq_weights_1": { "input_low": [ [ [ @@ -31919,7 +31919,7 @@ "output_low": -0.21894927322864532, "output_high": 1.706294298171997 }, - "Multiply_6960/fq_weights_1": { + "Multiply_6932/fq_weights_1": { "input_low": [ [ [ @@ -39103,7 +39103,7 @@ "output_low": -1.4062703847885132, "output_high": 7.131800174713135 }, - "Multiply_6953/fq_weights_1": { + 
"Multiply_6925/fq_weights_1": { "input_low": [ [ [ @@ -67791,7 +67791,7 @@ "output_low": -0.7759751081466675, "output_high": 3.3463926315307617 }, - "Multiply_6946/fq_weights_1": { + "Multiply_6918/fq_weights_1": { "input_low": [ [ [ @@ -89293,7 +89293,7 @@ "output_low": -0.3361658453941345, "output_high": 3.5603017807006836 }, - "Multiply_6967/fq_weights_1": { + "Multiply_6939/fq_weights_1": { "input_low": [ [ [ diff --git a/tests/openvino/native/data/2024.1/reference_scales/yolo-v3-tiny-onnx_performance.json b/tests/openvino/native/data/2024.1/reference_scales/yolo-v3-tiny-onnx_performance.json index 62a5ff4984e..d291895eeec 100644 --- a/tests/openvino/native/data/2024.1/reference_scales/yolo-v3-tiny-onnx_performance.json +++ b/tests/openvino/native/data/2024.1/reference_scales/yolo-v3-tiny-onnx_performance.json @@ -7155,7 +7155,7 @@ "output_low": -3.859119176864624, "output_high": 3.828969717025757 }, - "Multiply_6981/fq_weights_1": { + "Multiply_6953/fq_weights_1": { "input_low": [ [ [ @@ -14339,7 +14339,7 @@ "output_low": -7.785910606384277, "output_high": 7.725083351135254 }, - "Multiply_6939/fq_weights_1": { + "Multiply_6911/fq_weights_1": { "input_low": [ [ [ @@ -21523,7 +21523,7 @@ "output_low": -13.285137176513672, "output_high": 13.181346893310547 }, - "Multiply_6932/fq_weights_1": { + "Multiply_6904/fq_weights_1": { "input_low": [ [ [ @@ -25123,7 +25123,7 @@ "output_low": -11.602160453796387, "output_high": 11.511518478393555 }, - "Multiply_6925/fq_weights_1": { + "Multiply_6897/fq_weights_1": { "input_low": [ [ [ @@ -26931,7 +26931,7 @@ "output_low": -6.0120320320129395, "output_high": 5.965063095092773 }, - "Multiply_6918/fq_weights_1": { + "Multiply_6890/fq_weights_1": { "input_low": [ [ [ @@ -27843,7 +27843,7 @@ "output_low": -3.5011093616485596, "output_high": 3.473757028579712 }, - "Multiply_6911/fq_weights_1": { + "Multiply_6883/fq_weights_1": { "input_low": [ [ [ @@ -28319,7 +28319,7 @@ "output_low": -3.0029969215393066, "output_high": 2.9795360565185547 }, - "Multiply_6974/fq_weights_1": { + "Multiply_6946/fq_weights_1": { "input_low": [ [ [ @@ -31919,7 +31919,7 @@ "output_low": -1.6986862421035767, "output_high": 1.685415267944336 }, - "Multiply_6960/fq_weights_1": { + "Multiply_6932/fq_weights_1": { "input_low": [ [ [ @@ -39103,7 +39103,7 @@ "output_low": -7.094355583190918, "output_high": 7.038930892944336 }, - "Multiply_6953/fq_weights_1": { + "Multiply_6925/fq_weights_1": { "input_low": [ [ [ @@ -67791,7 +67791,7 @@ "output_low": -3.372742176055908, "output_high": 3.3463926315307617 }, - "Multiply_6946/fq_weights_1": { + "Multiply_6918/fq_weights_1": { "input_low": [ [ [ @@ -89293,7 +89293,7 @@ "output_low": -3.5883357524871826, "output_high": 3.5603017807006836 }, - "Multiply_6967/fq_weights_1": { + "Multiply_6939/fq_weights_1": { "input_low": [ [ [ diff --git a/tests/openvino/native/quantization/test_min_max.py b/tests/openvino/native/quantization/test_min_max.py new file mode 100644 index 00000000000..6873a05dd45 --- /dev/null +++ b/tests/openvino/native/quantization/test_min_max.py @@ -0,0 +1,80 @@ +# Copyright (c) 2024 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Tuple + +import pytest + +from nncf.common.graph.graph import NNCFGraph +from nncf.common.graph.transformations.commands import TargetType +from nncf.openvino.graph.layer_attributes import OVLayerAttributes +from nncf.openvino.graph.metatypes.openvino_metatypes import OVConvolutionMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVDepthwiseConvolutionMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVMatMulMetatype +from nncf.openvino.graph.transformations.commands import OVTargetPoint +from nncf.quantization.algorithms.min_max.backend import MinMaxAlgoBackend +from nncf.quantization.algorithms.min_max.openvino_backend import OVMinMaxAlgoBackend +from tests.post_training.test_templates.models import NNCFGraphToTest +from tests.post_training.test_templates.test_min_max import TemplateTestGetChannelAxes +from tests.post_training.test_templates.test_min_max import TemplateTestGetTargetPointShape +from tests.post_training.test_templates.test_min_max import TemplateTestMinMaxAlgorithm + + +class TestOVMinMaxAlgorithm(TemplateTestMinMaxAlgorithm): + @property + def backend(self) -> MinMaxAlgoBackend: + return OVMinMaxAlgoBackend + + @property + def conv_metatype(self): + return OVConvolutionMetatype + + def create_target_point(self, target_point_type: TargetType, name: str, port_id: int) -> OVTargetPoint: + return OVTargetPoint(target_point_type, name, port_id) + + +class TestOVGetTargetPointShape(TemplateTestGetTargetPointShape, TestOVMinMaxAlgorithm): + def get_nncf_graph(self, weight_port_id: int, weight_shape: Tuple[int]) -> NNCFGraph: + conv_layer_attrs = OVLayerAttributes({weight_port_id: {"name": "dummy", "shape": weight_shape}}) + return NNCFGraphToTest(OVConvolutionMetatype, conv_layer_attrs).nncf_graph + + +class TestOVGetChannelAxes(TemplateTestGetChannelAxes, TestOVMinMaxAlgorithm): + @property + def depthwiseconv_metatype(self): + return OVDepthwiseConvolutionMetatype + + @property + def matmul_metatype(self): + return OVMatMulMetatype + + @staticmethod + def get_conv_node_attrs(weight_port_id: int, weight_shape: Tuple[int]) -> OVLayerAttributes: + constant_attributes = {weight_port_id: {"name": "dummy", "shape": weight_shape}} + return OVLayerAttributes(constant_attributes, {}, {}) + + @staticmethod + def get_depthwiseconv_node_attrs(weight_port_id: int, weight_shape: Tuple[int]) -> OVLayerAttributes: + return TestOVGetChannelAxes.get_conv_node_attrs(weight_port_id, weight_shape) + + @staticmethod + def get_matmul_node_attrs( + weight_port_id: int, transpose_weight: Tuple[int], weight_shape: Tuple[int] + ) -> OVLayerAttributes: + constant_attributes = {weight_port_id: {"name": "dummy", "shape": weight_shape}} + constant_attributes[weight_port_id]["transpose"] = transpose_weight + return OVLayerAttributes(constant_attributes, {}, {}) + + def test_get_channel_axes_deptwiseconv_node_onnx_torch(self): + pytest.skip("Test is not applied for OV backend.") + + def test_get_channel_axes_matmul_torch(self): + pytest.skip("Test is not applied for OV backend.") diff --git 
a/tests/openvino/native/quantization/test_ptq_params.py b/tests/openvino/native/quantization/test_ptq_params.py index 71fda6d1a7c..73b73e511ea 100644 --- a/tests/openvino/native/quantization/test_ptq_params.py +++ b/tests/openvino/native/quantization/test_ptq_params.py @@ -9,22 +9,28 @@ # See the License for the specific language governing permissions and # limitations under the License. +import numpy as np import pytest +from nncf.common.graph.graph import NNCFGraph from nncf.common.graph.patterns import GraphPattern from nncf.common.graph.patterns.manager import PatternsManager from nncf.common.graph.transformations.commands import TargetType +from nncf.common.graph.transformations.commands import TransformationType from nncf.common.hardware.config import HW_CONFIG_TYPE_TARGET_DEVICE_MAP from nncf.common.utils.backend import BackendType +from nncf.openvino.graph.metatypes.openvino_metatypes import OVConcatMetatype from nncf.openvino.graph.metatypes.openvino_metatypes import OVConvolutionMetatype from nncf.openvino.graph.metatypes.openvino_metatypes import OVMatMulMetatype from nncf.openvino.graph.metatypes.openvino_metatypes import OVSoftmaxMetatype from nncf.openvino.graph.nncf_graph_builder import GraphConverter +from nncf.openvino.graph.transformations.commands import OVQuantizerInsertionCommand from nncf.openvino.graph.transformations.commands import OVTargetPoint from nncf.parameters import TargetDevice from nncf.quantization.algorithms.min_max.algorithm import MinMaxQuantization from nncf.quantization.algorithms.min_max.openvino_backend import OVMinMaxAlgoBackend from nncf.scopes import IgnoredScope +from tests.common.quantization.metatypes import CatTestMetatype from tests.common.quantization.metatypes import Conv2dTestMetatype from tests.common.quantization.metatypes import LinearTestMetatype from tests.common.quantization.metatypes import SoftmaxTestMetatype @@ -60,17 +66,34 @@ def check_quantize_outputs_fq_num(self, quantize_outputs, act_num_q, weight_num_ assert act_num_q == 1 assert weight_num_q == 1 + def check_unified_scale_layout(self, layout, unified_scale_group): + assert len(layout.transformations) == len(unified_scale_group) + for t, ref_tp in zip(layout.transformations, unified_scale_group): + assert isinstance(t, OVQuantizerInsertionCommand) + assert t.target_point == ref_tp + assert t.type == TransformationType.INSERT + assert np.isclose(t.quantizer_parameters.input_low.data, -4.031496) + assert np.isclose(t.quantizer_parameters.input_high.data, 4) + def target_point(self, target_type: TargetType, target_node_name: str, port_id: int) -> OVTargetPoint: return OVTargetPoint(target_type, target_node_name, port_id) + def get_backend_tensor(self, value): + return np.array(value) + @property def metatypes_mapping(self): return { Conv2dTestMetatype: OVConvolutionMetatype, LinearTestMetatype: OVMatMulMetatype, SoftmaxTestMetatype: OVSoftmaxMetatype, + CatTestMetatype: OVConcatMetatype, } + @property + def nncf_graph_cls(self): + return NNCFGraph + @pytest.fixture(scope="session") def test_params(self): linear_model = LinearModel().ov_model diff --git a/tests/openvino/native/quantization/test_quantizer_config.py b/tests/openvino/native/quantization/test_quantizer_config.py index 86758011c90..63472133787 100644 --- a/tests/openvino/native/quantization/test_quantizer_config.py +++ b/tests/openvino/native/quantization/test_quantizer_config.py @@ -11,7 +11,6 @@ import pytest -from nncf.common.graph.transformations.commands import TargetType from 
nncf.openvino.graph.layer_attributes import OVLayerAttributes from nncf.openvino.graph.metatypes.openvino_metatypes import OVConvolutionMetatype from nncf.openvino.graph.metatypes.openvino_metatypes import OVDepthwiseConvolutionMetatype @@ -22,26 +21,11 @@ from tests.post_training.test_templates.models import NNCFGraphToTestSumAggregation from tests.post_training.test_templates.test_quantizer_config import TemplateTestQuantizerConfig -ParamsCls = TemplateTestQuantizerConfig.TestGetStatisticsCollectorParameters - class TestQuantizerConfig(TemplateTestQuantizerConfig): def get_algo_backend(self): return OVMinMaxAlgoBackend() - @pytest.fixture( - params=[ - pytest.param( - (TargetType.PRE_LAYER_OPERATION, "/Sum_1_0", (0, 2), (0, 1, 2)), - marks=pytest.mark.skip("Ticket 102414: remove hardcoded axes for activations"), - ), - (TargetType.POST_LAYER_OPERATION, "/Conv_1_0", (0, 2, 3), None), - (TargetType.OPERATION_WITH_WEIGHTS, "/Conv_1_0", (1, 2, 3), None), - ] - ) - def statistic_collector_parameters(self, request) -> ParamsCls: - return ParamsCls(*request.param) - @pytest.fixture def single_conv_nncf_graph(self) -> NNCFGraphToTest: conv_layer_attrs = OVLayerAttributes({0: {"name": "dummy", "shape": (4, 4, 4, 4)}}) diff --git a/tests/openvino/native/test_nncf_graph_builder.py b/tests/openvino/native/test_nncf_graph_builder.py index 65587b26459..12389ae9c15 100644 --- a/tests/openvino/native/test_nncf_graph_builder.py +++ b/tests/openvino/native/test_nncf_graph_builder.py @@ -97,3 +97,45 @@ def _get_default_nncf_graph_edge(from_node, to_node, input_port_id, output_port_ ) assert set(nncf_graph.get_input_edges(mm_node)) == ref_input_edges assert set(nncf_graph.get_output_edges(input_node)) == ref_output_edges + + +@pytest.mark.parametrize( + "ov_type,expected_nncf_dtype", + [ + (ov.Type.f16, Dtype.FLOAT), + (ov.Type.f32, Dtype.FLOAT), + (ov.Type.f64, Dtype.FLOAT), + (ov.Type.i4, Dtype.INTEGER), + (ov.Type.i8, Dtype.INTEGER), + (ov.Type.i16, Dtype.INTEGER), + (ov.Type.i32, Dtype.INTEGER), + (ov.Type.i64, Dtype.INTEGER), + (ov.Type.u1, Dtype.INTEGER), + (ov.Type.u4, Dtype.INTEGER), + (ov.Type.u8, Dtype.INTEGER), + (ov.Type.u16, Dtype.INTEGER), + (ov.Type.u32, Dtype.INTEGER), + (ov.Type.u64, Dtype.INTEGER), + (ov.Type.boolean, Dtype.INTEGER), + (ov.Type.string, Dtype.INTEGER), + ], +) +def test_convert_to_nncf_dtype_supported_types(ov_type: ov.Type, expected_nncf_dtype: Dtype): + actual_nncf_dtype = GraphConverter.convert_to_nncf_dtype(ov_type) + assert actual_nncf_dtype == expected_nncf_dtype + + +@pytest.mark.parametrize( + "ov_type", + [ + ov.Type.bf16, + ov.Type.nf4, + ov.Type.undefined, + # TODO(andrey-churkin): Add in OV 2024.0 + # ov.Type.f8e4m3, + # ov.Type.f8e5m2, + ], +) +def test_convert_to_nncf_dtype_unsupported_types(ov_type: ov.Type): + with pytest.raises(NotImplementedError): + _ = GraphConverter.convert_to_nncf_dtype(ov_type) diff --git a/tests/openvino/native/test_node_utils.py b/tests/openvino/native/test_node_utils.py index 74ca6462fcf..4fdc7eea444 100644 --- a/tests/openvino/native/test_node_utils.py +++ b/tests/openvino/native/test_node_utils.py @@ -18,7 +18,6 @@ from nncf.openvino.graph.layer_attributes import OVLayerAttributes from nncf.openvino.graph.metatypes.openvino_metatypes import OVMatMulMetatype from nncf.openvino.graph.nncf_graph_builder import GraphConverter -from nncf.openvino.graph.node_utils import get_channel_agnostic_reduction_axes from nncf.openvino.graph.node_utils import get_weight_channel_axes from nncf.openvino.graph.node_utils import get_weight_value from 
nncf.openvino.graph.node_utils import get_weighted_layer_attributes @@ -97,20 +96,3 @@ def test_get_weight_channel_axes_for_matmul(weights_port_id, transpose, shape, e assert len(actual_channel_axes) == len(expected_channel_axes) assert all(a == b for a, b in zip(actual_channel_axes, expected_channel_axes)) - - -@pytest.mark.parametrize( - "shape, channel_axes, ref_reduction_axes", - [ - ((1, 128), [-1], (0,)), - ((1, 256, 1), [-2], (0, 2)), - ((1, 128, 512), [-1], (0, 1)), - ((1, 3, 224, 224), [1], (0, 2, 3)), - ((1, 1, 12, 12), [1], (0, 2, 3)), - ((1, 1, 12, 12), [1, 2], (0, 3)), - ], -) -def test_get_channel_agnostic_reduction_axes(shape, channel_axes, ref_reduction_axes): - reduction_axes = get_channel_agnostic_reduction_axes(channel_axes=channel_axes, shape=shape) - - assert reduction_axes == ref_reduction_axes diff --git a/tests/openvino/native/test_tensor_collector_batch_size.py b/tests/openvino/native/test_tensor_collector_batch_size.py new file mode 100644 index 00000000000..b9cf472e498 --- /dev/null +++ b/tests/openvino/native/test_tensor_collector_batch_size.py @@ -0,0 +1,50 @@ +# Copyright (c) 2024 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import numpy as np +import pytest + +from nncf.experimental.common.tensor_statistics.collectors import AGGREGATORS_MAP +from nncf.openvino.statistics.collectors import OV_REDUCERS_MAP +from nncf.openvino.statistics.collectors import OVNNCFCollectorTensorProcessor +from nncf.openvino.statistics.statistics import OVMinMaxTensorStatistic +from nncf.openvino.tensor import OVNNCFTensor +from tests.common.experimental.test_tensor_collector_batch_size import TemplateTestTensorCollectorBatchSize + + +class TestTensorCollectorBatchSize(TemplateTestTensorCollectorBatchSize): + @staticmethod + def get_tensor_statistics_class(): + return OVMinMaxTensorStatistic + + @staticmethod + def get_tensor_processor(): + return OVNNCFCollectorTensorProcessor() + + @staticmethod + def get_nncf_tensor_class(): + return OVNNCFTensor + + @pytest.fixture(params=OV_REDUCERS_MAP.values()) + def reducers(self, request) -> bool: + return request.param + + @pytest.fixture(params=AGGREGATORS_MAP.values()) + def aggregators(self, request) -> bool: + return request.param + + @pytest.fixture(params=[True, False]) + def inplace(self, request): + return request.param + + @staticmethod + def to_backend_tensor(tensor: np.ndarray): + return tensor diff --git a/tests/openvino/tools/calibrate.py b/tests/openvino/tools/calibrate.py index b009fe1d5f9..4687c86ee03 100644 --- a/tests/openvino/tools/calibrate.py +++ b/tests/openvino/tools/calibrate.py @@ -22,7 +22,7 @@ from dataclasses import replace from enum import Enum from itertools import islice -from typing import Any, Iterable, List, Optional, TypeVar +from typing import Any, Dict, Iterable, List, Optional, TypeVar import numpy as np import openvino.runtime as ov @@ -101,6 +101,8 @@ def parse_args(): parser.add_argument("--impl", help="NNCF OpenVINO backend implementation.", choices=["pot", "native"], default=None) + 
parser.add_argument("--batch_size", help="Batch size", type=int, default=1) + return parser.parse_args() @@ -884,6 +886,7 @@ class DataItem: def __init__(self, model_evaluator): self.model_evaluator = model_evaluator + self.batch_size = self.model_evaluator.dataset.batch def __iter__(self): for sequence in self.model_evaluator.dataset: @@ -1032,6 +1035,33 @@ def filter_configuration(config: Config) -> Config: return config +def update_accuracy_checker_config(accuracy_checker_config: Config, batch_size: int) -> None: + """ + Updates batch section of accuracy checker configuration file by batch_size value. + + :param accuracy_checker_config: Accuracy checker configuration file. + :param batch_size: Batch size value. + """ + for model in accuracy_checker_config["models"]: + for dataset in model["datasets"]: + dataset["batch"] = batch_size + print(f"Updated batch size value to {batch_size}") + + +def update_nncf_algorithms_config(nncf_algorithms_config: Dict[str, Dict[str, Any]], batch_size: int) -> None: + """ + Updates subset_size parameter depending on batch_size and subset_size from an algorithm config. + + :param nncf_algorithms_config: Configuration file of an algorithm. + :param batch_size: Batch size value. + """ + for nncf_method, config in nncf_algorithms_config.items(): + subset_size = config.get("subset_size", 300) + new_subset_size = subset_size // batch_size + config["subset_size"] = new_subset_size + print(f"Updated subset_size value for {nncf_method} method to {new_subset_size} ") + + def main(): args = parse_args() if args.impl is not None: @@ -1042,6 +1072,10 @@ def main(): xml_path, bin_path = get_model_paths(config.model) accuracy_checker_config = get_accuracy_checker_config(config.engine) nncf_algorithms_config = get_nncf_algorithms_config(config.compression, args.output_dir) + assert args.batch_size >= 0 + if args.batch_size > 1: + update_accuracy_checker_config(accuracy_checker_config, args.batch_size) + update_nncf_algorithms_config(nncf_algorithms_config, args.batch_size) set_log_file(f"{args.output_dir}/log.txt") output_dir = os.path.join(args.output_dir, "optimized") @@ -1052,7 +1086,7 @@ def main(): "quantize_with_accuracy_control": quantize_model_with_accuracy_control, } for algo_name, algo_config in nncf_algorithms_config.items(): - algo_fn = algo_name_to_method_map.get(algo_name, None) + algo_fn = algo_name_to_method_map.get(algo_name) if algo_fn: quantize_model_arguments = { "xml_path": xml_path, diff --git a/tests/openvino/tools/config.py b/tests/openvino/tools/config.py index e05de9620e7..723dbb5f72a 100644 --- a/tests/openvino/tools/config.py +++ b/tests/openvino/tools/config.py @@ -283,7 +283,7 @@ def _configure_ac_params(self): ac_conf = ConfigReader.convert_paths(ac_conf) ConfigReader._filter_launchers(ac_conf, filtering_params, mode=mode) for req_num in ["stat_requests_number", "eval_requests_number"]: - ac_conf[req_num] = self.engine[req_num] if req_num in self.engine else None + ac_conf[req_num] = self.engine.get(req_num, None) self["engine"] = ac_conf diff --git a/tests/post_training/README.md b/tests/post_training/README.md index ef6ef864fb4..af2639c8ec7 100644 --- a/tests/post_training/README.md +++ b/tests/post_training/README.md @@ -60,6 +60,7 @@ Additional arguments: - `--fp32` to run validation of not quantized model - `--cuda` to enable CUDA_TORCH backend - `--subset-size=N` to force subset_size of calibration dataset +- `--batch-size=N` to use batch_size for calibration. Some of the models do not support --batch-size > 1. 
For such models, please, use --batch-size=1. - `--benchmark` to collect throughput statistics, add `FPS` column to result.csv - `--extra-columns` to add additional columns to reports.csv: - `Stat. collection time` - time of statistic collection @@ -115,3 +116,9 @@ Run test with additional columns: ```bash pytest --data= --extra-columns tests/post_training/test_quantize_conformance.py ``` + +Run test with calibration dataset having batch-size=10 for all models: + +```bash +pytest --data= --batch-size 10 tests/post_training/test_quantize_conformance.py +``` diff --git a/tests/post_training/conftest.py b/tests/post_training/conftest.py index 4c1089ee453..8288b993b31 100644 --- a/tests/post_training/conftest.py +++ b/tests/post_training/conftest.py @@ -19,6 +19,7 @@ def pytest_addoption(parser): parser.addoption("--data", action="store", help="Data directory") parser.addoption("--output", action="store", default="./tmp/", help="Directory to store artifacts") parser.addoption("--no-eval", action="store_true", help="Skip validation step") + parser.addoption("--batch-size", action="store", default=1, type=int, help="Batch size of calibration dataset") parser.addoption("--subset-size", type=int, default=None, help="Set subset size") parser.addoption("--fp32", action="store_true", help="Test original model") parser.addoption("--cuda", action="store_true", help="Enable CUDA_TORCH backend") diff --git a/tests/post_training/data/wc_reference_data.yaml b/tests/post_training/data/wc_reference_data.yaml index cd27628038a..5235d155244 100644 --- a/tests/post_training/data/wc_reference_data.yaml +++ b/tests/post_training/data/wc_reference_data.yaml @@ -1,8 +1,16 @@ tinyllama_data_free_backend_OV: metric_value: 0.72057 + num_int4: 228 + num_int8: 84 tinyllama_data_aware_backend_OV: metric_value: 0.83084 + num_int4: 184 + num_int8: 128 tinyllama_data_aware_awq_backend_OV: metric_value: 0.81237 + num_int4: 184 + num_int8: 128 tinyllama_data_aware_awq_stateful_backend_OV: - metric_value: 0.81237 \ No newline at end of file + metric_value: 0.81237 + num_int4: 184 + num_int8: 128 \ No newline at end of file diff --git a/tests/post_training/model_scope.py b/tests/post_training/model_scope.py index 69c1b9890e0..e6c46f715d0 100644 --- a/tests/post_training/model_scope.py +++ b/tests/post_training/model_scope.py @@ -39,6 +39,7 @@ "subset_size": 2, }, "backends": ALL_PTQ_BACKENDS + [BackendType.OPTIMUM], + "is_batch_size_supported": False, }, { "reported_name": "hf/hf-internal-testing/tiny-random-GPTNeoXForCausalLM", @@ -50,6 +51,7 @@ "subset_size": 2, }, "backends": [BackendType.OPTIMUM], + "is_batch_size_supported": False, }, # Timm models { @@ -159,6 +161,7 @@ ), }, "backends": NNCF_PTQ_BACKENDS, + "is_batch_size_supported": False, # Issue is raised during export with dynamich shape. 
}, { "reported_name": "timm/mobilenetv2_050", @@ -296,6 +299,7 @@ "sensitivity_metric": SensitivityMetric.WEIGHT_QUANTIZATION_ERROR, }, "backends": [BackendType.OV], + "is_batch_size_supported": False, }, { "reported_name": "tinyllama_data_aware", @@ -303,6 +307,7 @@ "pipeline_cls": LMWeightCompression, "compression_params": {"group_size": 64, "ratio": 0.8, "mode": CompressWeightsMode.INT4_SYM}, "backends": [BackendType.OV], + "is_batch_size_supported": False, }, { "reported_name": "tinyllama_data_aware_awq", @@ -310,6 +315,7 @@ "pipeline_cls": LMWeightCompression, "compression_params": {"group_size": 64, "ratio": 0.8, "mode": CompressWeightsMode.INT4_SYM, "awq": True}, "backends": [BackendType.OV], + "is_batch_size_supported": False, }, { "reported_name": "tinyllama_data_aware_awq_stateful", @@ -318,6 +324,7 @@ "compression_params": {"group_size": 64, "ratio": 0.8, "mode": CompressWeightsMode.INT4_SYM, "awq": True}, "params": {"is_stateful": True}, "backends": [BackendType.OV], + "is_batch_size_supported": False, }, ] @@ -332,6 +339,8 @@ def generate_tests_scope(models_list: List[Dict]) -> Dict[str, dict]: for test_model_param in models_list: for backend in test_model_param["backends"] + [BackendType.FP32]: model_param = copy.deepcopy(test_model_param) + if "is_batch_size_supported" not in model_param: # Set default value of is_batch_size_supported. + model_param["is_batch_size_supported"] = True reported_name = model_param["reported_name"] model_id = reported_name_to_model_id_mapping[reported_name] if backend == BackendType.FP32: diff --git a/tests/post_training/pipelines/base.py b/tests/post_training/pipelines/base.py index e3f35f35657..353e53930d4 100644 --- a/tests/post_training/pipelines/base.py +++ b/tests/post_training/pipelines/base.py @@ -71,6 +71,13 @@ def fill(self, stdout: str) -> None: """ +@dataclass +class NumCompressNodes: + num_fq_nodes: Optional[int] = None + num_int8: Optional[int] = None + num_int4: Optional[int] = None + + @dataclass class PTQTimeStats(StatsFromOutput): """ @@ -130,12 +137,12 @@ class RunInfo: metric_name: Optional[str] = None metric_value: Optional[float] = None metric_diff: Optional[float] = None - num_fq_nodes: Optional[float] = None compression_memory_usage: Optional[int] = None status: Optional[str] = None fps: Optional[float] = None time_total: Optional[float] = None time_compression: Optional[float] = None + num_compress_nodes: Optional[NumCompressNodes] = None stats_from_output = StatsFromOutput() @staticmethod @@ -157,7 +164,9 @@ def get_result_dict(self): "Metric name": self.metric_name, "Metric value": self.metric_value, "Metric diff": self.metric_diff, - "Num FQ": self.num_fq_nodes, + "Num FQ": self.num_compress_nodes.num_fq_nodes, + "Num int4": self.num_compress_nodes.num_int4, + "Num int8": self.num_compress_nodes.num_int8, "RAM MiB": self.format_memory_usage(self.compression_memory_usage), "Compr. 
time": self.format_time(self.time_compression), **self.stats_from_output.get_stats(), @@ -184,6 +193,7 @@ def __init__( no_eval: bool, run_benchmark_app: bool, params: dict = None, + batch_size: int = 1, ) -> None: self.reported_name = reported_name self.model_id = model_id @@ -193,6 +203,7 @@ def __init__( self.data_dir = data_dir self.reference_data = reference_data self.params = params or {} + self.batch_size = batch_size self.no_eval = no_eval self.run_benchmark_app = run_benchmark_app self.output_model_dir: Path = self.output_dir / self.reported_name / self.backend.value @@ -208,7 +219,7 @@ def __init__( self.dummy_tensor = None self.input_size = None - self.run_info = RunInfo(model=reported_name, backend=self.backend) + self.run_info = RunInfo(model=reported_name, backend=self.backend, num_compress_nodes=NumCompressNodes()) @abstractmethod def prepare_preprocessor(self) -> None: @@ -381,7 +392,7 @@ def get_num_compressed(self) -> None: if node_type == "FakeQuantize": num_fq += 1 - self.run_info.num_fq_nodes = num_fq + self.run_info.num_compress_nodes.num_fq_nodes = num_fq def run_bench(self) -> None: """ diff --git a/tests/post_training/pipelines/image_classification_timm.py b/tests/post_training/pipelines/image_classification_timm.py index c17dc3e2ef4..5cda1d04e6d 100644 --- a/tests/post_training/pipelines/image_classification_timm.py +++ b/tests/post_training/pipelines/image_classification_timm.py @@ -45,15 +45,23 @@ def prepare_model(self) -> None: timm_model.eval() timm_model = replace_timm_custom_modules_with_torch_native(timm_model) self.model_cfg = timm_model.default_cfg - self.input_size = [1] + list(timm_model.default_cfg["input_size"]) + self.input_size = [self.batch_size] + list(timm_model.default_cfg["input_size"]) self.dummy_tensor = torch.rand(self.input_size) + if self.batch_size > 1: # Dynamic batch_size shape export + self.input_size[0] = -1 if self.backend in PT_BACKENDS: self.model = timm_model if self.backend == BackendType.ONNX: onnx_path = self.fp32_model_dir / "model_fp32.onnx" - torch.onnx.export(timm_model, self.dummy_tensor, onnx_path, export_params=True, opset_version=13) + additional_kwargs = {} + if self.batch_size > 1: + additional_kwargs["input_names"] = ["image"] + additional_kwargs["dynamic_axes"] = {"image": {0: "batch"}} + torch.onnx.export( + timm_model, self.dummy_tensor, onnx_path, export_params=True, opset_version=13, **additional_kwargs + ) self.model = onnx.load(onnx_path) self.input_name = self.model.graph.input[0].name @@ -112,7 +120,7 @@ def transform_fn(data_item): def prepare_calibration_dataset(self): dataset = datasets.ImageFolder(root=self.data_dir / "imagenet" / "val", transform=self.transform) - loader = torch.utils.data.DataLoader(dataset, batch_size=1, num_workers=2, shuffle=False) + loader = torch.utils.data.DataLoader(dataset, batch_size=self.batch_size, num_workers=2, shuffle=False) self.calibration_dataset = nncf.Dataset(loader, self.get_transform_calibration_fn()) @@ -122,8 +130,9 @@ def _validate(self): dataset_size = len(val_loader) - predictions = [0] * dataset_size - references = [-1] * dataset_size + # Initialize result tensors for async inference support. 
+ predictions = np.zeros((dataset_size)) + references = -1 * np.ones((dataset_size)) core = ov.Core() @@ -143,7 +152,7 @@ def _validate(self): def process_result(request, userdata): output_data = request.get_output_tensor().data predicted_label = np.argmax(output_data, axis=1) - predictions[userdata] = [predicted_label] + predictions[userdata] = predicted_label pbar.progress.update(pbar.task, advance=1) infer_queue.set_callback(process_result) @@ -156,8 +165,6 @@ def process_result(request, userdata): infer_queue.wait_all() - predictions = np.concatenate(predictions, axis=0) - references = np.concatenate(references, axis=0) acc_top1 = accuracy_score(predictions, references) self.run_info.metric_name = "Acc@1" diff --git a/tests/post_training/pipelines/lm_weight_compression.py b/tests/post_training/pipelines/lm_weight_compression.py index b1a6e5853dc..84829e63288 100644 --- a/tests/post_training/pipelines/lm_weight_compression.py +++ b/tests/post_training/pipelines/lm_weight_compression.py @@ -161,7 +161,21 @@ def save_compressed_model(self) -> None: self.model_hf._save_config(self.output_model_dir) def get_num_compressed(self) -> None: - pass + """ + Get number of the i8, u8, i4, u4 ops in the compressed IR. + """ + num_int8 = 0 + num_int4 = 0 + + for node in self.model.get_ops(): + for i in range(node.get_output_size()): + if node.get_output_element_type(i).get_type_name() in ["i8", "u8"]: + num_int8 += 1 + if node.get_output_element_type(i).get_type_name() in ["i4", "u4"]: + num_int4 += 1 + + self.run_info.num_compress_nodes.num_int8 = num_int8 + self.run_info.num_compress_nodes.num_int4 = num_int4 def run_bench(self) -> None: pass @@ -219,3 +233,19 @@ def _validate(self): similarity = all_metrics["similarity"][0] self.run_info.metric_name = "Similarity" self.run_info.metric_value = round(similarity, 5) + + num_int4_reference = self.reference_data.get("num_int4") + num_int8_reference = self.reference_data.get("num_int8") + + num_int4_value = self.run_info.num_compress_nodes.num_int4 + num_int8_value = self.run_info.num_compress_nodes.num_int8 + + if num_int4_reference != num_int4_value: + status_msg = f"Regression: The number of int4 ops is different \ + than reference {num_int4_reference} != {num_int4_value}" + raise ValueError(status_msg) + + if num_int8_reference != num_int8_value: + status_msg = f"Regression: The number of int8 ops is different \ + than reference {num_int8_reference} != {num_int8_value}" + raise ValueError(status_msg) diff --git a/tests/post_training/test_quantize_conformance.py b/tests/post_training/test_quantize_conformance.py index 2b3c06fa0b7..815c1f33b7b 100644 --- a/tests/post_training/test_quantize_conformance.py +++ b/tests/post_training/test_quantize_conformance.py @@ -45,6 +45,11 @@ def fixture_no_eval(pytestconfig): return pytestconfig.getoption("no_eval") +@pytest.fixture(scope="session", name="batch_size") +def fixture_batch_size(pytestconfig): + return pytestconfig.getoption("batch_size") + + @pytest.fixture(scope="session", name="subset_size") def fixture_subset_size(pytestconfig): return pytestconfig.getoption("subset_size") @@ -127,11 +132,13 @@ def fixture_wc_report_data(output_dir): df.to_csv(output_dir / "results.csv", index=False) -def maybe_skip_test_case(test_model_param, run_fp32_backend, run_torch_cuda_backend): +def maybe_skip_test_case(test_model_param, run_fp32_backend, run_torch_cuda_backend, batch_size): if test_model_param["backend"] == BackendType.FP32 and not run_fp32_backend: pytest.skip("To run test for not quantized model use 
--fp32 argument") if test_model_param["backend"] == BackendType.CUDA_TORCH and not run_torch_cuda_backend: pytest.skip("To run test for CUDA_TORCH backend use --cuda argument") + if batch_size > 1 and not test_model_param["is_batch_size_supported"]: + pytest.skip("The model does not support batch_size > 1. Please use --batch-size 1.") return test_model_param @@ -196,6 +203,7 @@ def test_ptq_quantization( output_dir: Path, ptq_result_data: Dict[str, RunInfo], no_eval: bool, + batch_size: int, run_fp32_backend: bool, run_torch_cuda_backend: bool, subset_size: Optional[int], @@ -211,11 +219,21 @@ def test_ptq_quantization( if test_case_name not in ptq_reference_data: raise nncf.ValidationError(f"{test_case_name} does not exist in 'reference_data.yaml'") test_model_param = PTQ_TEST_CASES[test_case_name] - maybe_skip_test_case(test_model_param, run_fp32_backend, run_torch_cuda_backend) + maybe_skip_test_case(test_model_param, run_fp32_backend, run_torch_cuda_backend, batch_size) pipeline_cls = test_model_param["pipeline_cls"] + # Recalculates subset_size when subset_size is None + if batch_size > 1 and subset_size is None: + subset_size = 300 // batch_size + print(f"Update subset_size value based on provided batch_size to {subset_size}.") pipeline_kwargs = create_pipeline_kwargs(test_model_param, subset_size, test_case_name, ptq_reference_data) pipeline_kwargs.update( - {"output_dir": output_dir, "data_dir": data_dir, "no_eval": no_eval, "run_benchmark_app": run_benchmark_app} + { + "output_dir": output_dir, + "data_dir": data_dir, + "no_eval": no_eval, + "run_benchmark_app": run_benchmark_app, + "batch_size": batch_size, + } ) pipeline: BaseTestPipeline = pipeline_cls(**pipeline_kwargs) pipeline.run() @@ -252,6 +270,7 @@ def test_weight_compression( output_dir: Path, wc_result_data: Dict[str, RunInfo], no_eval: bool, + batch_size: int, run_fp32_backend: bool, run_torch_cuda_backend: bool, subset_size: Optional[int], @@ -267,11 +286,17 @@ def test_weight_compression( if test_case_name not in wc_reference_data: raise RuntimeError(f"{test_case_name} is not defined in `wc_reference_data` fixture") test_model_param = WC_TEST_CASES[test_case_name] - maybe_skip_test_case(test_model_param, run_fp32_backend, run_torch_cuda_backend) + maybe_skip_test_case(test_model_param, run_fp32_backend, run_torch_cuda_backend, batch_size) pipeline_cls = test_model_param["pipeline_cls"] pipeline_kwargs = create_pipeline_kwargs(test_model_param, subset_size, test_case_name, wc_reference_data) pipeline_kwargs.update( - {"output_dir": output_dir, "data_dir": data_dir, "no_eval": no_eval, "run_benchmark_app": run_benchmark_app} + { + "output_dir": output_dir, + "data_dir": data_dir, + "no_eval": no_eval, + "run_benchmark_app": run_benchmark_app, + "batch_size": batch_size, + } ) pipeline: BaseTestPipeline = pipeline_cls(**pipeline_kwargs) pipeline.run() diff --git a/tests/post_training/test_templates/helpers.py b/tests/post_training/test_templates/helpers.py index 885eb6c4263..f36700e8633 100644 --- a/tests/post_training/test_templates/helpers.py +++ b/tests/post_training/test_templates/helpers.py @@ -30,9 +30,14 @@ class StaticDatasetMock: to convert data to backend specific type. 
""" - def __init__(self, input_size: Tuple, fn_to_type: Callable = None): + def __init__( + self, + input_size: Tuple, + fn_to_type: Callable = None, + length: int = 1, + ): super().__init__() - self._len = 1 + self._len = length self._input_size = input_size self._fn_to_type = fn_to_type @@ -47,19 +52,19 @@ def __len__(self) -> int: return self._len -def get_static_dataset( - input_size: Tuple, - transform_fn: Callable, - fn_to_type: Callable, -) -> Dataset: +def get_static_dataset(input_size: Tuple, transform_fn: Callable, fn_to_type: Callable, length: int = 1) -> Dataset: """ Create nncf.Dataset for StaticDatasetMock. :param input_size: Size of generated tensors, :param transform_fn: Function to transformation dataset. :param fn_to_type: Function, defaults to None. + :param length: The length of the dataset. :return: Instance of nncf.Dataset for StaticDatasetMock. """ - return Dataset(StaticDatasetMock(input_size, fn_to_type), transform_fn) + return Dataset( + StaticDatasetMock(input_size, fn_to_type, length), + transform_fn, + ) class ConvTestModel(nn.Module): diff --git a/tests/post_training/test_templates/models.py b/tests/post_training/test_templates/models.py index 884e2e0e662..b4bbccedb60 100644 --- a/tests/post_training/test_templates/models.py +++ b/tests/post_training/test_templates/models.py @@ -40,7 +40,14 @@ def __init__( NodeWithType("Output_1", OutputNoopMetatype, layer_attributes=output_layer_attrs), ] node_edges = [("Input_1", "Conv_1"), ("Conv_1", "Output_1")] - original_mock_graph = create_mock_graph(nodes, node_edges) + original_mock_graph = create_mock_graph( + nodes, + node_edges, + ( + {NNCFGraph.ACTIVATION_SHAPE_EDGE_ATTR: (1, 3, 224, 224)}, + {NNCFGraph.ACTIVATION_SHAPE_EDGE_ATTR: (1, 10, 224, 224)}, + ), + ) self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph, nncf_graph_cls) diff --git a/tests/post_training/test_templates/test_channel_alignment.py b/tests/post_training/test_templates/test_channel_alignment.py index 49cb8ffe0c3..f1e0fbf440d 100644 --- a/tests/post_training/test_templates/test_channel_alignment.py +++ b/tests/post_training/test_templates/test_channel_alignment.py @@ -528,7 +528,7 @@ class MockBackend(backend_cls): tensor_collectors = stat_points[0].algorithm_to_tensor_collectors[algorithm._algorithm_key] assert len(tensor_collectors) == 1 assert tensor_collectors[0] == ref_stat_collector - MockBackend.get_statistic_collector.assert_called_once_with((0, 2, 3), 1e-4, ref_subset_size, ref_inplace) + MockBackend.get_statistic_collector.assert_called_once_with((2, 3), 1e-4, ref_subset_size, ref_inplace) target_point = stat_points[0].target_point assert target_point.target_node_name == target_node_name @@ -555,4 +555,4 @@ def test_statistic_collectors(self, inplace_ref, q_ref): assert isinstance(aggr, MedianAggregator) assert aggr.num_samples == num_samples_ref assert aggr._keepdims - assert aggr._aggregation_axes == (0,) + assert aggr._aggregation_axes == (0, 1) diff --git a/tests/post_training/test_templates/test_min_max.py b/tests/post_training/test_templates/test_min_max.py new file mode 100644 index 00000000000..188296511b6 --- /dev/null +++ b/tests/post_training/test_templates/test_min_max.py @@ -0,0 +1,168 @@ +# Copyright (c) 2024 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from abc import abstractmethod +from typing import Tuple + +import pytest + +from nncf.common.graph.graph import NNCFGraph +from nncf.common.graph.graph import NNCFNode +from nncf.common.graph.layer_attributes import BaseLayerAttributes +from nncf.common.graph.transformations.commands import TargetPoint +from nncf.common.graph.transformations.commands import TargetType +from nncf.quantization.algorithms.min_max.backend import MinMaxAlgoBackend + +CONV_WEIGHT_SHAPE = (3, 10, 4, 4) +DEPTHWISECONV_WEIGHT_SHAPE = (5, 10, 20, 7, 7) +MATMUL_WEIGHT_SHAPE = (2, 4) + + +class TemplateTestMinMaxAlgorithm: + @property + @abstractmethod + def backend(self) -> MinMaxAlgoBackend: + """ + Get backend specific MinMaxAlgoBackend + + :return MinMaxAlgoBackend: Backend specific MinMaxAlgoBackend + """ + + @property + @abstractmethod + def conv_metatype(self): + "Backend specific Convolution metatype." + + @property + @abstractmethod + def create_target_point(self, target_point_type: TargetType, name: str, port_id: int) -> TargetPoint: + "Creates backend specific TargetPoint." + + +class TemplateTestGetTargetPointShape(TemplateTestMinMaxAlgorithm): + @abstractmethod + def get_nncf_graph(self, weight_port_id: int, weight_shape: Tuple[int]) -> NNCFGraph: + "Returns backend specific NNCFGraph having a single Convolution." + + @pytest.mark.parametrize( + "target_point_type, input_port_id, reference_shape", + ( + (TargetType.PRE_LAYER_OPERATION, 0, (1, 3, 224, 224)), + (TargetType.POST_LAYER_OPERATION, 0, (1, 10, 224, 224)), + (TargetType.OPERATION_WITH_WEIGHTS, 1, (3, 10, 4, 4)), + ), + ) + def test_get_target_point_shape( + self, target_point_type: TargetType, input_port_id: int, reference_shape: Tuple[int] + ): + nncf_graph = self.get_nncf_graph(input_port_id, CONV_WEIGHT_SHAPE) + nodes = nncf_graph.get_nodes_by_metatypes((self.conv_metatype,)) + assert len(nodes) == 1 + node = nodes.pop() + target_point = self.create_target_point(target_point_type, node.node_name, input_port_id) + assert self.backend().get_target_point_shape(nncf_graph, node, target_point) == reference_shape + + +class TemplateTestGetChannelAxes(TemplateTestMinMaxAlgorithm): + @property + @abstractmethod + def depthwiseconv_metatype(self): + "Backend specific Depthwise convolution metatype." + + @property + @abstractmethod + def matmul_metatype(self): + "Backend specific MatMul metatype." + + @staticmethod + @abstractmethod + def get_conv_node_attrs(weight_port_id: int, weight_shape: Tuple[int]) -> BaseLayerAttributes: + "Returns backend specific layer attributes for Convolution." + + @staticmethod + @abstractmethod + def get_depthwiseconv_node_attrs(weight_port_id: int, weight_shape: Tuple[int]) -> BaseLayerAttributes: + "Returns backend specific layer attributes for Depthwise convolution." + + @staticmethod + @abstractmethod + def get_matmul_node_attrs( + weight_port_id: int, transpose_weight: Tuple[int], weight_shape: Tuple[int] + ) -> BaseLayerAttributes: + "Returns backend specific layer attributes for MatMul."
+ + @pytest.mark.parametrize( + "conv_shape, weight_port_id, ref_axes", ((CONV_WEIGHT_SHAPE, 0, (0,)), (CONV_WEIGHT_SHAPE, 1, (0,))) + ) + def test_get_channel_axes_conv_node(self, conv_shape, weight_port_id, ref_axes): + """ + Checks Convolution quantization axes in MinMax for OV, ONNX and Torch. + """ + conv_node = NNCFNode({"metatype": self.conv_metatype}) + conv_node.layer_attributes = self.get_conv_node_attrs(weight_port_id, conv_shape) + target_point = self.create_target_point(TargetType.PRE_LAYER_OPERATION, None, weight_port_id) + assert self.backend().get_weight_quantization_axes(conv_node, target_point) == ref_axes + + @pytest.mark.parametrize( + "conv_shape, weight_port_id, ref_axes", + ((DEPTHWISECONV_WEIGHT_SHAPE, 0, (0,)), (DEPTHWISECONV_WEIGHT_SHAPE, 1, (0,))), + ) + def test_get_channel_axes_deptwiseconv_node_onnx_torch(self, conv_shape, weight_port_id, ref_axes): + """ + Checks Depthwise convolution quantization axes in MinMax for ONNX and Torch. + """ + conv_node = NNCFNode({"metatype": self.depthwiseconv_metatype}) + conv_node.layer_attributes = self.get_depthwiseconv_node_attrs(weight_port_id, conv_shape) + target_point = self.create_target_point(TargetType.PRE_LAYER_OPERATION, None, weight_port_id) + assert self.backend().get_weight_quantization_axes(conv_node, target_point) == ref_axes + + @pytest.mark.parametrize( + "conv_shape, weight_port_id, ref_axes", + ((DEPTHWISECONV_WEIGHT_SHAPE, 0, (0, 1)), (DEPTHWISECONV_WEIGHT_SHAPE, 1, (0, 1))), + ) + def test_get_channel_axes_deptwiseconv_node_ov(self, conv_shape, weight_port_id, ref_axes): + """ + Checks Depthwise convolution quantization axes in MinMax for OV. + """ + conv_node = NNCFNode({"metatype": self.depthwiseconv_metatype}) + conv_node.layer_attributes = self.get_depthwiseconv_node_attrs(weight_port_id, conv_shape) + target_point = self.create_target_point(TargetType.PRE_LAYER_OPERATION, None, weight_port_id) + assert self.backend().get_weight_quantization_axes(conv_node, target_point) == ref_axes + + @pytest.mark.parametrize( + "weight_shape, weight_port_id, transpose_weight, ref_axes", + ( + (MATMUL_WEIGHT_SHAPE, 1, False, (1,)), + (MATMUL_WEIGHT_SHAPE, 1, True, (0,)), + (MATMUL_WEIGHT_SHAPE, 0, True, (1,)), + (MATMUL_WEIGHT_SHAPE, 0, False, (0,)), + ), + ) + def test_get_channel_axes_matmul_node_ov_onnx(self, weight_shape, weight_port_id, transpose_weight, ref_axes): + """ + Checks MatMul quantization axes in MinMax for OV and ONNX. + """ + matmul_node = NNCFNode({"metatype": self.matmul_metatype}) + matmul_node.layer_attributes = self.get_matmul_node_attrs(weight_port_id, transpose_weight, weight_shape) + target_point = self.create_target_point(TargetType.PRE_LAYER_OPERATION, None, weight_port_id) + assert self.backend().get_weight_quantization_axes(matmul_node, target_point) == ref_axes + + @pytest.mark.parametrize( + "weight_shape, ref_axes", + # Torch has strict specification - weight has the following layout: [C_OUT, C_IN] + ((MATMUL_WEIGHT_SHAPE, (0,)),), + ) + def test_get_channel_axes_matmul_torch(self, weight_shape, ref_axes): + """ + Checks MatMul quantization axes in MinMax for Torch. 
+ """ + matmul_node = NNCFNode({"metatype": self.matmul_metatype}) + matmul_node.layer_attributes = self.get_matmul_node_attrs(None, None, weight_shape) + assert self.backend().get_weight_quantization_axes(matmul_node, "dummy") == ref_axes diff --git a/tests/post_training/test_templates/test_ptq_params.py b/tests/post_training/test_templates/test_ptq_params.py index c7a0e45799c..f56db68f938 100644 --- a/tests/post_training/test_templates/test_ptq_params.py +++ b/tests/post_training/test_templates/test_ptq_params.py @@ -16,6 +16,7 @@ import pytest import nncf +from nncf.common.graph.graph import NNCFGraph from nncf.common.graph.operator_metatypes import InputNoopMetatype from nncf.common.graph.operator_metatypes import OperatorMetatype from nncf.common.graph.operator_metatypes import OutputNoopMetatype @@ -37,6 +38,7 @@ from nncf.quantization.passes import transform_to_inference_graph from nncf.quantization.range_estimator import RangeEstimatorParametersSet from nncf.scopes import IgnoredScope +from tests.common.quantization.metatypes import CatTestMetatype from tests.common.quantization.metatypes import Conv2dTestMetatype from tests.common.quantization.metatypes import IdentityTestMetatype from tests.common.quantization.metatypes import LinearTestMetatype @@ -91,6 +93,50 @@ def __init__(self, metatypes: Dict[TestMetatype, OperatorMetatype]): self.weight_quantization_target_point_names.append(node.node_name) +class ModelWithUnifiedScales: + # Input_1 + # / | \ + # Conv_1 Conv_2 Conv_3 + # \ | / + # Cat_1 + # | + # Output_1 + + def __init__(self, metatypes: Dict[TestMetatype, OperatorMetatype], nncf_graph_cls=NNCFGraph): + nodes = [ + NodeWithType("Input_1", InputNoopMetatype), + NodeWithType("Conv_1", metatypes[Conv2dTestMetatype]), + NodeWithType("Conv_2", metatypes[Conv2dTestMetatype]), + NodeWithType("Conv_3", metatypes[Conv2dTestMetatype]), + NodeWithType("Cat_1", metatypes[CatTestMetatype]), + NodeWithType("Output_1", OutputNoopMetatype), + ] + node_edges = [ + ("Input_1", "Conv_1"), + ("Input_1", "Conv_2"), + ("Input_1", "Conv_3"), + ("Conv_1", "Cat_1"), + ("Conv_2", "Cat_1"), + ("Conv_3", "Cat_1"), + ("Cat_1", "Output_1"), + ] + original_mock_graph = create_mock_graph(nodes, node_edges) + self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph, nncf_graph_cls=nncf_graph_cls) + + +class DummyMinMaxTensorStatistic(MinMaxTensorStatistic): + def tensor_eq(self): + return True + + +class DummyMinMaxTensorCollector: + def __init__(self, min_val, max_val): + self._stat = DummyMinMaxTensorStatistic(min_values=min_val, max_values=max_val) + + def get_statistics(self): + return self._stat + + class TemplateTestPTQParams: @abstractmethod def get_algo_backend(self): @@ -112,6 +158,13 @@ def check_is_mean_min_max_statistic_collector(self, tensor_collector: TensorColl def check_quantize_outputs_fq_num(self, quantize_outputs, act_num_q, weight_num_q): pass + @abstractmethod + def check_unified_scale_layout(self, layout, unified_scales_group): + """ + Checks that given transfromation layout and unified_scales_group target points + are correspond to each other and to the test params + """ + @abstractmethod @pytest.fixture(scope="session") def test_params(self): @@ -131,6 +184,15 @@ def target_point(self, target_type: TargetType, target_node_name: str, port_id: def metatypes_mapping(self): pass + @property + @abstractmethod + def nncf_graph_cls(self): + pass + + @abstractmethod + def get_backend_tensor(self, value): + pass + @pytest.mark.parametrize( "range_estimator_params", 
[RangeEstimatorParametersSet.MINMAX, RangeEstimatorParametersSet.MEAN_MINMAX, None] ) @@ -282,6 +344,35 @@ def test_quantization_points_overflow_fix(self, overflow_fix, affected_target_po ) assert Counter([t_p.target_node_name for t_p in target_points_overflow_fix]) == Counter(affected_target_points) + def test_unified_scales_command_creation(self, mocker): + model = ModelWithUnifiedScales(self.metatypes_mapping, self.nncf_graph_cls) + algo = MinMaxQuantization() + algo._backend_entity = self.get_algo_backend() + # Imitating solver quantization setup building + q_tp_vs_qcf = {} + unified_scales_group = [] + for i in range(1, 4): + tp = self.target_point(TargetType.POST_LAYER_OPERATION, f"/Conv_{i}_0", port_id=0) + q_tp_vs_qcf[tp] = QuantizerConfig() + unified_scales_group.append(tp) + + algo._quantization_target_points_to_qconfig = q_tp_vs_qcf + algo._unified_scale_groups = [unified_scales_group] + + mock_transformer = mocker.MagicMock() + mocker.patch( + "nncf.quantization.algorithms.min_max.algorithm.ModelTransformerFactory.create", + return_value=mock_transformer, + ) + stats = StatisticPointsContainer() + for idx, tp in enumerate(unified_scales_group): + tc = DummyMinMaxTensorCollector(self.get_backend_tensor(idx - 1), self.get_backend_tensor(idx + 2)) + stats.add_statistic_point(StatisticPoint(tp, tc, algo._algorithm_key)) + algo.apply(model, model.nncf_graph, stats) + mock_transformer.transform.assert_called_once() + layout = mock_transformer.transform.call_args.args[0] + self.check_unified_scale_layout(layout, unified_scales_group) + @pytest.mark.parametrize("validate_scopes", (True, False)) def test_validate_scope(self, test_params, validate_scopes): nncf_graph = test_params["test_model_type_pass"]["nncf_graph"] @@ -308,20 +399,14 @@ def test_empty_statistics(self, mode, mocker): target_point = self.target_point(TargetType.PRE_LAYER_OPERATION, "A", 0) stat_points = StatisticPointsContainer() - class DummyMinMaxTensorStatistic(MinMaxTensorStatistic): - def tensor_eq(self): - return True - - class EmptyTensorCollector: - def get_statistics(self): - return DummyMinMaxTensorStatistic(None, None) - dummy_tp = {target_point: QuantizerConfig()} if mode == "target_point": dummy_tps = (dummy_tp, {}) else: dummy_tps = ({}, ((target_point,),)) - stat_points.add_statistic_point(StatisticPoint(target_point, EmptyTensorCollector(), algo._algorithm_key)) + stat_points.add_statistic_point( + StatisticPoint(target_point, DummyMinMaxTensorCollector(None, None), algo._algorithm_key) + ) mocker.patch("nncf.common.factory.ModelTransformerFactory.create", return_value=mocker.MagicMock()) mocker.patch( "nncf.quantization.algorithms.min_max.algorithm.MinMaxQuantization._get_quantization_target_points", diff --git a/tests/post_training/test_templates/test_quantizer_config.py b/tests/post_training/test_templates/test_quantizer_config.py index df98648d1a2..f369b6fcd7b 100644 --- a/tests/post_training/test_templates/test_quantizer_config.py +++ b/tests/post_training/test_templates/test_quantizer_config.py @@ -82,13 +82,48 @@ def conv_sum_aggregation_nncf_graph(self) -> NNCFGraphToTestSumAggregation: class TestGetStatisticsCollectorParameters: target_type: TargetType target_node_name: str + batchwise_statistics: bool ref_per_ch_reduction_axes: List[int] ref_per_tensor_reduction_axes: List[int] - @abstractmethod - @pytest.fixture + @pytest.fixture( + params=[ + pytest.param( + TestGetStatisticsCollectorParameters(TargetType.PRE_LAYER_OPERATION, "/Sum_1_0", True, (2,), (1, 2)), + ), + 
TestGetStatisticsCollectorParameters( + TargetType.POST_LAYER_OPERATION, + "/Conv_1_0", + True, + (2, 3), + (1, 2, 3), + ), + TestGetStatisticsCollectorParameters( + TargetType.OPERATION_WITH_WEIGHTS, + "/Conv_1_0", + True, + (1, 2, 3), + (0, 1, 2, 3), + ), + TestGetStatisticsCollectorParameters(TargetType.PRE_LAYER_OPERATION, "/Sum_1_0", False, (0, 2), (0, 1, 2)), + TestGetStatisticsCollectorParameters( + TargetType.POST_LAYER_OPERATION, + "/Conv_1_0", + False, + (0, 2, 3), + (0, 1, 2, 3), + ), + TestGetStatisticsCollectorParameters( + TargetType.OPERATION_WITH_WEIGHTS, + "/Conv_1_0", + False, + (1, 2, 3), + (0, 1, 2, 3), + ), + ] + ) def statistic_collector_parameters(self, request) -> TestGetStatisticsCollectorParameters: - pass + return request.param def test_default_quantizer_config(self, single_conv_nncf_graph): min_max_algo = MinMaxQuantization() @@ -231,7 +266,9 @@ def test_get_stat_collector( statistic_collector_parameters: TestGetStatisticsCollectorParameters, ): params = statistic_collector_parameters - min_max_algo = MinMaxQuantization(activations_range_estimator_params=range_estimator_params) + min_max_algo = MinMaxQuantization( + subset_size=num_samples, activations_range_estimator_params=range_estimator_params + ) min_max_algo._backend_entity = self.get_algo_backend() q_config = QuantizerConfig(num_bits=8, mode=q_config_mode, per_channel=q_config_per_channel) @@ -247,7 +284,7 @@ def test_get_stat_collector( target_point = list(min_max_algo._quantization_target_points_to_qconfig.keys())[0] tensor_collector = min_max_algo._get_stat_collector( - conv_sum_aggregation_nncf_graph.nncf_graph, target_point, q_config, num_samples + conv_sum_aggregation_nncf_graph.nncf_graph, target_point, q_config, params.batchwise_statistics ) is_weight_tp = target_point.is_weight_target_point() @@ -284,5 +321,7 @@ def test_get_stat_collector( assert self.get_reduction_axes(reducer) == params.ref_per_ch_reduction_axes else: assert self.get_reduction_axes(reducer) == params.ref_per_tensor_reduction_axes - - assert tensor_collector.num_samples == num_samples + if is_weight_tp: + assert tensor_collector.num_samples == 1 + else: + assert tensor_collector.num_samples == num_samples diff --git a/tests/shared/command.py b/tests/shared/command.py index 65c07e0f89a..bad708bdea5 100644 --- a/tests/shared/command.py +++ b/tests/shared/command.py @@ -70,7 +70,7 @@ def target(): self.output = [] for line in self.process.stdout: - line = line.decode("utf-8") + line = line.decode("utf-8", errors="ignore") self.output.append(line) if stdout: sys.stdout.write(line) diff --git a/tests/shared/test_templates/template_test_nncf_tensor.py b/tests/shared/test_templates/template_test_nncf_tensor.py index 2005e6c03bc..382ea2cbef9 100644 --- a/tests/shared/test_templates/template_test_nncf_tensor.py +++ b/tests/shared/test_templates/template_test_nncf_tensor.py @@ -354,11 +354,9 @@ def test_getitem(self): def test_iter(self): arr = [0, 1, 2] nncf_tensor = Tensor(self.to_tensor(arr)) - i = 0 - for x in nncf_tensor: + for i, x in enumerate(nncf_tensor): assert x == arr[i] assert isinstance(x, Tensor) - i += 1 # Math diff --git a/tests/tensorflow/sota_checkpoints_eval.json b/tests/tensorflow/sota_checkpoints_eval.json index 6191979fa25..70fa3f07672 100644 --- a/tests/tensorflow/sota_checkpoints_eval.json +++ b/tests/tensorflow/sota_checkpoints_eval.json @@ -306,7 +306,7 @@ "model_description": "RetinaNet", "compression_description": "INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations) + 
filter pruning 40%", "batch_per_gpu": 15, - "target_tf": 32.67, + "target_tf": 32.61, "target_ov": 32.47 }, "yolo_v4_coco": { diff --git a/tests/torch/data/reference_graphs/quantized/ptq/symmetric/inception_v3.dot b/tests/torch/data/reference_graphs/quantized/ptq/symmetric/inception_v3.dot index 3eca81edfce..1082c9c5847 100644 --- a/tests/torch/data/reference_graphs/quantized/ptq/symmetric/inception_v3.dot +++ b/tests/torch/data/reference_graphs/quantized/ptq/symmetric/inception_v3.dot @@ -6,19 +6,19 @@ strict digraph { "4 Inception3/__mul___0" [id=4, type=__mul__]; "5 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__mul___0|OUTPUT]/symmetric_quantize_0" [id=5, type=symmetric_quantize]; "6 Inception3/__add___0" [id=6, type=__add__]; -"7 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__add___0|OUTPUT]/symmetric_quantize_0" [id=7, type=symmetric_quantize]; +"7 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__add___0|OUTPUT;Inception3/__add___1|OUTPUT;Inception3/__add___2|OUTPUT]/symmetric_quantize_0" [id=7, type=symmetric_quantize]; "8 Inception3/__getitem___1" [id=8, type=__getitem__]; "9 Inception3/unsqueeze_1" [id=9, type=unsqueeze]; "10 Inception3/__mul___1" [id=10, type=__mul__]; "11 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__mul___1|OUTPUT]/symmetric_quantize_0" [id=11, type=symmetric_quantize]; "12 Inception3/__add___1" [id=12, type=__add__]; -"13 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__add___1|OUTPUT]/symmetric_quantize_0" [id=13, type=symmetric_quantize]; +"13 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__add___0|OUTPUT;Inception3/__add___1|OUTPUT;Inception3/__add___2|OUTPUT]/symmetric_quantize_1" [id=13, type=symmetric_quantize]; "14 Inception3/__getitem___2" [id=14, type=__getitem__]; "15 Inception3/unsqueeze_2" [id=15, type=unsqueeze]; "16 Inception3/__mul___2" [id=16, type=__mul__]; "17 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__mul___2|OUTPUT]/symmetric_quantize_0" [id=17, type=symmetric_quantize]; "18 Inception3/__add___2" [id=18, type=__add__]; -"19 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__add___2|OUTPUT]/symmetric_quantize_0" [id=19, type=symmetric_quantize]; +"19 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__add___0|OUTPUT;Inception3/__add___1|OUTPUT;Inception3/__add___2|OUTPUT]/symmetric_quantize_2" [id=19, type=symmetric_quantize]; "20 Inception3/cat_0" [id=20, type=cat]; "21 Inception3/BasicConv2d[Conv2d_1a_3x3]/NNCFConv2d[conv]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=21, type=symmetric_quantize]; "22 Inception3/BasicConv2d[Conv2d_1a_3x3]/NNCFConv2d[conv]/conv2d_0" [id=22, type=conv2d]; @@ -542,20 +542,20 @@ strict digraph { "3 Inception3/unsqueeze_0" -> "4 Inception3/__mul___0"; "4 Inception3/__mul___0" -> "5 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__mul___0|OUTPUT]/symmetric_quantize_0"; "5 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__mul___0|OUTPUT]/symmetric_quantize_0" -> "6 
Inception3/__add___0"; -"6 Inception3/__add___0" -> "7 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__add___0|OUTPUT]/symmetric_quantize_0"; -"7 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__add___0|OUTPUT]/symmetric_quantize_0" -> "20 Inception3/cat_0"; +"6 Inception3/__add___0" -> "7 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__add___0|OUTPUT;Inception3/__add___1|OUTPUT;Inception3/__add___2|OUTPUT]/symmetric_quantize_0"; +"7 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__add___0|OUTPUT;Inception3/__add___1|OUTPUT;Inception3/__add___2|OUTPUT]/symmetric_quantize_0" -> "20 Inception3/cat_0"; "8 Inception3/__getitem___1" -> "9 Inception3/unsqueeze_1"; "9 Inception3/unsqueeze_1" -> "10 Inception3/__mul___1"; "10 Inception3/__mul___1" -> "11 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__mul___1|OUTPUT]/symmetric_quantize_0"; "11 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__mul___1|OUTPUT]/symmetric_quantize_0" -> "12 Inception3/__add___1"; -"12 Inception3/__add___1" -> "13 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__add___1|OUTPUT]/symmetric_quantize_0"; -"13 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__add___1|OUTPUT]/symmetric_quantize_0" -> "20 Inception3/cat_0"; +"12 Inception3/__add___1" -> "13 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__add___0|OUTPUT;Inception3/__add___1|OUTPUT;Inception3/__add___2|OUTPUT]/symmetric_quantize_1"; +"13 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__add___0|OUTPUT;Inception3/__add___1|OUTPUT;Inception3/__add___2|OUTPUT]/symmetric_quantize_1" -> "20 Inception3/cat_0"; "14 Inception3/__getitem___2" -> "15 Inception3/unsqueeze_2"; "15 Inception3/unsqueeze_2" -> "16 Inception3/__mul___2"; "16 Inception3/__mul___2" -> "17 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__mul___2|OUTPUT]/symmetric_quantize_0"; "17 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__mul___2|OUTPUT]/symmetric_quantize_0" -> "18 Inception3/__add___2"; -"18 Inception3/__add___2" -> "19 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__add___2|OUTPUT]/symmetric_quantize_0"; -"19 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__add___2|OUTPUT]/symmetric_quantize_0" -> "20 Inception3/cat_0"; +"18 Inception3/__add___2" -> "19 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__add___0|OUTPUT;Inception3/__add___1|OUTPUT;Inception3/__add___2|OUTPUT]/symmetric_quantize_2"; +"19 Inception3/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[Inception3/__add___0|OUTPUT;Inception3/__add___1|OUTPUT;Inception3/__add___2|OUTPUT]/symmetric_quantize_2" -> "20 Inception3/cat_0"; "20 Inception3/cat_0" -> "22 Inception3/BasicConv2d[Conv2d_1a_3x3]/NNCFConv2d[conv]/conv2d_0"; "21 
Inception3/BasicConv2d[Conv2d_1a_3x3]/NNCFConv2d[conv]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "22 Inception3/BasicConv2d[Conv2d_1a_3x3]/NNCFConv2d[conv]/conv2d_0"; "22 Inception3/BasicConv2d[Conv2d_1a_3x3]/NNCFConv2d[conv]/conv2d_0" -> "23 Inception3/BasicConv2d[Conv2d_1a_3x3]/NNCFBatchNorm2d[bn]/batch_norm_0"; diff --git a/tests/torch/nas/test_search.py b/tests/torch/nas/test_search.py index e717f3e4453..931dbe1ff79 100644 --- a/tests/torch/nas/test_search.py +++ b/tests/torch/nas/test_search.py @@ -273,7 +273,7 @@ def validate_model_fn(model, eval_datasets): ) max_subnetwork_acc = validate_model_fn(model, eval_datasets) - _, best_config, performance_metrics = search.run(validate_model_fn, eval_datasets, tmp_path) + _, _, performance_metrics = search.run(validate_model_fn, eval_datasets, tmp_path) assert max_subnetwork_acc == search_result_descriptors.expected_accuracy assert performance_metrics[1] == search_result_descriptors.subnet_expected_accuracy[search_algo_name] diff --git a/tests/torch/ptq/test_graphs.py b/tests/torch/ptq/test_graphs.py index 9061c01e0da..d441b5b04e1 100644 --- a/tests/torch/ptq/test_graphs.py +++ b/tests/torch/ptq/test_graphs.py @@ -20,6 +20,7 @@ from nncf.torch.layers import NNCF_RNN from nncf.torch.layers import LSTMCellNNCF from tests.post_training.test_templates.helpers import EmbeddingModel +from tests.post_training.test_templates.helpers import get_static_dataset from tests.torch import test_models from tests.torch.ptq.helpers import get_nncf_network from tests.torch.ptq.helpers import mock_collect_statistics @@ -100,6 +101,8 @@ def test_min_max_classification_quantized_graphs(desc: ModelDesc, quantization_p quantization_parameters["advanced_parameters"] = AdvancedQuantizationParameters(disable_bias_correction=True) quantization_algorithm = PostTrainingQuantization(**quantization_parameters) - quantized_model = quantization_algorithm.apply(nncf_network, nncf_network.nncf.get_graph(), dataset=None) + quantized_model = quantization_algorithm.apply( + nncf_network, nncf_network.nncf.get_graph(), dataset=get_static_dataset(desc.input_sample_sizes, None, None) + ) check_graph(quantized_model.nncf.get_graph(), desc.dot_filename(), graph_dir) diff --git a/tests/torch/ptq/test_min_max.py b/tests/torch/ptq/test_min_max.py new file mode 100644 index 00000000000..c57c82be429 --- /dev/null +++ b/tests/torch/ptq/test_min_max.py @@ -0,0 +1,108 @@ +# Copyright (c) 2024 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from typing import Tuple + +import pytest + +from nncf.common.graph.graph import NNCFGraph +from nncf.common.graph.layer_attributes import ConvolutionLayerAttributes +from nncf.common.graph.layer_attributes import LinearLayerAttributes +from nncf.common.graph.transformations.commands import TargetType +from nncf.quantization.algorithms.min_max.backend import MinMaxAlgoBackend +from nncf.quantization.algorithms.min_max.torch_backend import PTMinMaxAlgoBackend +from nncf.torch.graph.graph import PTNNCFGraph +from nncf.torch.graph.operator_metatypes import PTConv2dMetatype +from nncf.torch.graph.operator_metatypes import PTDepthwiseConv2dSubtype +from nncf.torch.graph.operator_metatypes import PTLinearMetatype +from nncf.torch.graph.transformations.commands import PTTargetPoint +from tests.post_training.test_templates.models import NNCFGraphToTest +from tests.post_training.test_templates.test_min_max import TemplateTestGetChannelAxes +from tests.post_training.test_templates.test_min_max import TemplateTestGetTargetPointShape +from tests.post_training.test_templates.test_min_max import TemplateTestMinMaxAlgorithm + + +class TestTorchMinMaxAlgorithm(TemplateTestMinMaxAlgorithm): + @property + def backend(self) -> MinMaxAlgoBackend: + return PTMinMaxAlgoBackend + + @property + def conv_metatype(self): + return PTConv2dMetatype + + def create_target_point(self, target_point_type: TargetType, name: str, port_id: int) -> PTTargetPoint: + if target_point_type == TargetType.POST_LAYER_OPERATION: + port_id = None + return PTTargetPoint(target_point_type, name, input_port_id=port_id) + + +class TestOVGetTargetPointShape(TemplateTestGetTargetPointShape, TestTorchMinMaxAlgorithm): + def get_nncf_graph(self, weight_port_id: int, weight_shape: Tuple[int]) -> NNCFGraph: + conv_layer_attrs = ConvolutionLayerAttributes( + weight_requires_grad=True, + in_channels=weight_shape[1], + out_channels=weight_shape[0], + kernel_size=weight_shape[2:], + stride=1, + dilations=1, + groups=1, + transpose=False, + padding_values=[], + ) + return NNCFGraphToTest(PTConv2dMetatype, conv_layer_attrs, PTNNCFGraph).nncf_graph + + +class TestTorchGetChannelAxes(TemplateTestGetChannelAxes, TestTorchMinMaxAlgorithm): + @property + def depthwiseconv_metatype(self): + return PTDepthwiseConv2dSubtype + + @property + def matmul_metatype(self): + return PTLinearMetatype + + @staticmethod + def get_conv_node_attrs(weight_port_id: int, weight_shape: Tuple[int]) -> ConvolutionLayerAttributes: + return ConvolutionLayerAttributes( + weight_requires_grad=False, + in_channels=weight_shape[0], + out_channels=weight_shape[1], + kernel_size=weight_shape[2:], + stride=1, + dilations=1, + groups=1, + transpose=False, + padding_values=[], + ) + + @staticmethod + def get_depthwiseconv_node_attrs(weight_port_id: int, weight_shape: Tuple[int]) -> ConvolutionLayerAttributes: + return ConvolutionLayerAttributes( + weight_requires_grad=False, + in_channels=weight_shape[1], + out_channels=weight_shape[2], + kernel_size=weight_shape[3:], + stride=1, + dilations=1, + groups=weight_shape[0], + transpose=False, + padding_values=[], + ) + + @staticmethod + def get_matmul_node_attrs(weight_port_id: int, transpose_weight: Tuple[int], weight_shape: Tuple[int]): + return LinearLayerAttributes(False, in_features=weight_shape[0], out_features=weight_shape[1]) + + def test_get_channel_axes_matmul_node_ov_onnx(self): + pytest.skip("Test is not applied for Torch backend.") + + def test_get_channel_axes_deptwiseconv_node_ov(self): + pytest.skip("Test is not applied for 
Torch backend.") diff --git a/tests/torch/ptq/test_ptq_params.py b/tests/torch/ptq/test_ptq_params.py index 8ebdb1f1b2d..88a691c47fc 100644 --- a/tests/torch/ptq/test_ptq_params.py +++ b/tests/torch/ptq/test_ptq_params.py @@ -10,20 +10,26 @@ # limitations under the License. import pytest +import torch from torch import nn from nncf.common.graph.patterns import GraphPattern from nncf.common.graph.patterns.manager import PatternsManager from nncf.common.graph.transformations.commands import TargetType +from nncf.common.graph.transformations.commands import TransformationType from nncf.common.utils.backend import BackendType from nncf.parameters import TargetDevice from nncf.quantization.algorithms.min_max.algorithm import MinMaxQuantization from nncf.quantization.algorithms.min_max.torch_backend import PTMinMaxAlgoBackend from nncf.scopes import IgnoredScope +from nncf.torch.graph.graph import PTNNCFGraph from nncf.torch.graph.graph import PTTargetPoint +from nncf.torch.graph.operator_metatypes import PTCatMetatype from nncf.torch.graph.operator_metatypes import PTModuleConv2dMetatype from nncf.torch.graph.operator_metatypes import PTModuleLinearMetatype from nncf.torch.graph.operator_metatypes import PTSoftmaxMetatype +from nncf.torch.graph.transformations.commands import PTSharedFnInsertionCommand +from tests.common.quantization.metatypes import CatTestMetatype from tests.common.quantization.metatypes import Conv2dTestMetatype from tests.common.quantization.metatypes import LinearTestMetatype from tests.common.quantization.metatypes import SoftmaxTestMetatype @@ -97,17 +103,34 @@ def check_quantize_outputs_fq_num(self, quantize_outputs, act_num_q, weight_num_ assert act_num_q == 1 assert weight_num_q == 1 + def check_unified_scale_layout(self, layout, unified_scale_group): + assert len(layout.transformations) == 1 + command = layout.transformations[0] + assert isinstance(command, PTSharedFnInsertionCommand) + assert command.op_name == "/Conv_1_0|INPUT0;/Conv_2_0|INPUT0;/Conv_3_0|INPUT0" + assert command.target_points == unified_scale_group + assert torch.allclose(command.fn.scale, torch.tensor(4.0)) + assert command.type == TransformationType.INSERT + def target_point(self, target_type: TargetType, target_node_name: str, port_id: int) -> PTTargetPoint: return PTTargetPoint(target_type, target_node_name, input_port_id=port_id) + def get_backend_tensor(self, value): + return torch.tensor(value) + @property def metatypes_mapping(self): return { Conv2dTestMetatype: PTModuleConv2dMetatype, LinearTestMetatype: PTModuleLinearMetatype, SoftmaxTestMetatype: PTSoftmaxMetatype, + CatTestMetatype: PTCatMetatype, } + @property + def nncf_graph_cls(self): + return PTNNCFGraph + @pytest.fixture(scope="session") def test_params(self): linear_model = LinearTestModel().get_nncf_network() diff --git a/tests/torch/ptq/test_quantizer_config.py b/tests/torch/ptq/test_quantizer_config.py index 4c80c5c10d0..2ce6fc4d177 100644 --- a/tests/torch/ptq/test_quantizer_config.py +++ b/tests/torch/ptq/test_quantizer_config.py @@ -11,7 +11,6 @@ import pytest -from nncf.common.graph.transformations.commands import TargetType from nncf.quantization.algorithms.min_max.torch_backend import PTMinMaxAlgoBackend from tests.post_training.test_templates.models import NNCFGraphToTest from tests.post_training.test_templates.models import NNCFGraphToTestDepthwiseConv @@ -21,23 +20,11 @@ from tests.torch.ptq.helpers import get_single_conv_nncf_graph from tests.torch.ptq.helpers import get_sum_aggregation_nncf_graph -ParamsCls = 
TemplateTestQuantizerConfig.TestGetStatisticsCollectorParameters - class TestQuantizerConfig(TemplateTestQuantizerConfig): def get_algo_backend(self): return PTMinMaxAlgoBackend() - @pytest.fixture( - params=[ - (TargetType.PRE_LAYER_OPERATION, "/Sum_1_0", (0, 2), (0, 1, 2)), - (TargetType.POST_LAYER_OPERATION, "/Conv_1_0", (0, 2, 3), (0, 1, 2, 3)), - (TargetType.OPERATION_WITH_WEIGHTS, "/Conv_1_0", (1, 2, 3), (0, 1, 2, 3)), - ] - ) - def statistic_collector_parameters(self, request) -> ParamsCls: - return ParamsCls(*request.param) - @pytest.fixture def single_conv_nncf_graph(self) -> NNCFGraphToTest: return get_single_conv_nncf_graph() diff --git a/tests/torch/ptq/test_smooth_quant.py b/tests/torch/ptq/test_smooth_quant.py index fa5d0599672..7af4fa98d33 100644 --- a/tests/torch/ptq/test_smooth_quant.py +++ b/tests/torch/ptq/test_smooth_quant.py @@ -21,9 +21,9 @@ from nncf.quantization.algorithms.smooth_quant.torch_backend import SQMultiply from nncf.torch.graph.operator_metatypes import PTModuleConv2dMetatype from nncf.torch.graph.operator_metatypes import PTModuleLinearMetatype +from nncf.torch.graph.transformations.commands import ExtraCompressionModuleType from nncf.torch.graph.transformations.commands import PTSharedFnInsertionCommand from nncf.torch.model_creation import wrap_model -from nncf.torch.nncf_network import ExtraCompressionModuleType from tests.post_training.test_templates.helpers import ConvTestModel from tests.post_training.test_templates.helpers import LinearMultiShapeModel from tests.post_training.test_templates.helpers import ShareWeghtsConvAndShareLinearModel diff --git a/tests/torch/ptq/test_tensor_collector_batch_size.py b/tests/torch/ptq/test_tensor_collector_batch_size.py new file mode 100644 index 00000000000..5beff90e67a --- /dev/null +++ b/tests/torch/ptq/test_tensor_collector_batch_size.py @@ -0,0 +1,51 @@ +# Copyright (c) 2024 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import numpy as np +import pytest +import torch + +from nncf.experimental.common.tensor_statistics.collectors import AGGREGATORS_MAP +from nncf.torch.tensor import PTNNCFTensor +from nncf.torch.tensor_statistics.collectors import PT_REDUCERS_MAP +from nncf.torch.tensor_statistics.collectors import PTNNCFCollectorTensorProcessor +from nncf.torch.tensor_statistics.statistics import PTMinMaxTensorStatistic +from tests.common.experimental.test_tensor_collector_batch_size import TemplateTestTensorCollectorBatchSize + + +class TestTensorCollectorBatchSize(TemplateTestTensorCollectorBatchSize): + @staticmethod + def get_tensor_statistics_class(): + return PTMinMaxTensorStatistic + + @staticmethod + def get_tensor_processor(): + return PTNNCFCollectorTensorProcessor() + + @staticmethod + def get_nncf_tensor_class(): + return PTNNCFTensor + + @pytest.fixture(params=PT_REDUCERS_MAP.values()) + def reducers(self, request) -> bool: + return request.param + + @pytest.fixture(params=AGGREGATORS_MAP.values()) + def aggregators(self, request) -> bool: + return request.param + + @pytest.fixture(params=[False]) + def inplace(self, request): + return request.param + + @staticmethod + def to_backend_tensor(tensor: np.ndarray): + return torch.tensor(tensor) diff --git a/tests/torch/ptq/test_weights_compression.py b/tests/torch/ptq/test_weights_compression.py index c357005a3bd..ba64f1341b8 100644 --- a/tests/torch/ptq/test_weights_compression.py +++ b/tests/torch/ptq/test_weights_compression.py @@ -53,7 +53,7 @@ def forward(self, input_ids): return res -class NestedMatMul(torch.nn.Module): +class MatMulModel(torch.nn.Module): def __init__(self): super().__init__() self.w = torch.nn.Parameter(torch.ones(size=(300, 300), dtype=torch.float32)) @@ -68,7 +68,7 @@ def __init__(self): self.conv_w = torch.nn.Parameter(torch.ones(size=(5, 3, 3, 3), dtype=torch.float32)) self.matmul_w = torch.nn.Parameter(torch.ones(size=(1, 3, 300, 300), dtype=torch.float32)) self.conv_tr_w = torch.nn.Parameter(torch.rand(size=(5, 4, 3, 3))) - self.nested_matmul = NestedMatMul() + self.nested_matmul = MatMulModel() def forward(self, input_): x = input_.to(torch.float32) @@ -241,3 +241,26 @@ def test_get_dtype_attribute_of_parameter(): assert compressed_model.weight.dtype == torch.uint8 compressed_model(dummy_input) assert compressed_model.weight.dtype == torch.uint8 + + +@pytest.mark.parametrize("device", ("cpu", "cuda")) +@pytest.mark.parametrize("dtype", ("float16", "float32")) +def test_model_devices_and_precisions(device, dtype): + if device == "cuda" and not torch.cuda.is_available(): + pytest.skip("Skipping for CPU-only setups") + device = torch.device(device) + dtype = torch.float16 if dtype == "float16" else torch.float32 + + model = MatMulModel().to(device) + if dtype == torch.float16: + model.half() + + dummy_input = torch.rand((1, 300), dtype=dtype, device=device) + wrapped_model = wrap_model(model, example_input=dummy_input, trace_parameters=True) + compressed_model = compress_weights(wrapped_model) + result = compressed_model(dummy_input) + + # Scale should always be in float16 + assert compressed_model.state_dict()["_nncf.external_op.weights_decompressor_w._scale"].dtype == torch.float16 + # Result should be in the precision of the model + assert result.dtype == dtype diff --git a/tests/torch/qat/README.md b/tests/torch/qat/README.md new file mode 100644 index 00000000000..aa7a2bf312a --- /dev/null +++ b/tests/torch/qat/README.md @@ -0,0 +1,17 @@ +# Qantization-aware Training after Post-training Quantization Suite + 
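+At a high level, every test in this suite runs the flow sketched below: post-training quantization of a pretrained sample model, followed by a short fine-tuning stage. This is a minimal illustration rather than the actual test code; `model`, `calibration_loader`, `train_loader` and `criterion` are placeholders.
+
+```python
+import nncf
+import torch
+
+# Post-training quantization with a calibration dataset
+calibration_dataset = nncf.Dataset(calibration_loader, lambda item: item[0])
+quantized_model = nncf.quantize(model, calibration_dataset)
+
+# Quantization-aware training to recover the original metric
+optimizer = torch.optim.SGD(quantized_model.parameters(), lr=1e-4)
+quantized_model.train()
+for inputs, targets in train_loader:
+    optimizer.zero_grad()
+    loss = criterion(quantized_model(inputs), targets)
+    loss.backward()
+    optimizer.step()
+```
+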
+This test suite is based on the training and validation code of the QAT examples. It takes the quantization configs of all samples, applies PTQ to the corresponding models, compares the metrics of the original and quantized models, and then tries to recover the metric with QAT.
+
+## Installation
+
+```bash
+make install-torch-test
+```
+
+## Usage
+
+Once the environment is installed, use the following command to run all tests:
+
+```bash
+python -m pytest tests/torch/qat --sota-data-dir /path/to/omz/training/datasets --sota-checkpoints-dir /path/to/compression_training_baselines
+```
diff --git a/tests/torch/qat/helpers.py b/tests/torch/qat/helpers.py
new file mode 100644
index 00000000000..da6788a6a91
--- /dev/null
+++ b/tests/torch/qat/helpers.py
@@ -0,0 +1,136 @@
+# Copyright (c) 2024 Intel Corporation
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import gc
+from typing import Any, Dict, Optional
+from unittest.mock import MagicMock
+
+import torch
+
+from examples.torch.common.example_logger import logger
+from examples.torch.common.execution import start_worker
+from nncf.common.quantization.structs import QuantizationPreset
+from nncf.common.quantization.structs import QuantizationScheme
+from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters
+from nncf.quantization.advanced_parameters import OverflowFix
+from nncf.quantization.advanced_parameters import QuantizationParameters
+from nncf.quantization.range_estimator import RangeEstimatorParameters
+from nncf.quantization.range_estimator import RangeEstimatorParametersSet
+from nncf.torch.nncf_network import NNCFNetwork
+from nncf.torch.quantization.layers import BaseQuantizer
+
+
+def convert_quantization_mode(mode: Optional[str]) -> QuantizationScheme:
+    if mode is None:
+        return None
+
+    if mode == "symmetric":
+        return QuantizationScheme.SYMMETRIC
+    if mode == "asymmetric":
+        return QuantizationScheme.ASYMMETRIC
+    raise RuntimeError(f"Unknown quantization mode: {mode}")
+
+
+def convert_quantization_params(conf: Optional[Dict[str, Any]]) -> QuantizationParameters:
+    if conf is None:
+        return QuantizationParameters()
+
+    return QuantizationParameters(
+        num_bits=conf.get("bits", None),
+        mode=convert_quantization_mode(conf.get("mode", None)),
+        signedness_to_force=conf.get("signed", None),
+        per_channel=None  # Always use the default value for the per_channel parameter to prevent
+        # accuracy degradation: per_channel=False for activations would make the activation quantizers
+        # of depthwise convolutions work in per-tensor mode, which does not make sense for the
+        # CPU target device.
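+        # For illustration (an assumed example, not taken from a particular sample config): a config
+        # section such as {"bits": 8, "mode": "symmetric", "signed": true} is converted into
+        # QuantizationParameters(num_bits=8, mode=QuantizationScheme.SYMMETRIC, signedness_to_force=True),
+        # with per_channel left at its default.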
+ ) + + +def convert_overflow_fix_param(param: Optional[str]) -> OverflowFix: + if param is None: + return OverflowFix.FIRST_LAYER + if param == "enable": + return OverflowFix.ENABLE + if param == "disable": + return OverflowFix.DISABLE + if param == "first_layer_only": + return OverflowFix.FIRST_LAYER + raise RuntimeError(f"Overflow fix param {param} is unknown.") + + +def convert_quantization_preset(preset: str) -> QuantizationPreset: + if preset == "performance": + return QuantizationPreset.PERFORMANCE + if preset == "mixed": + return QuantizationPreset.MIXED + raise RuntimeError(f"Preset {preset} is unknown.") + + +def get_range_init_type(config_quantization_params: Dict[str, Any]) -> RangeEstimatorParameters: + if ( + "initializer" in config_quantization_params + and "range" in config_quantization_params["initializer"] + and "type" in config_quantization_params["initializer"]["range"] + ): + range_init_type = config_quantization_params["initializer"]["range"]["type"] + if range_init_type == "mean_percentile": + return RangeEstimatorParametersSet.MEAN_QUANTILE + logger.info(f"Unknown range init type: {range_init_type}, default range init type is used.") + return RangeEstimatorParametersSet.MINMAX + + +def get_quantization_preset(config_quantization_params: Dict[str, Any]) -> Optional[QuantizationPreset]: + if "preset" not in config_quantization_params: + return None + return convert_quantization_preset(config_quantization_params["preset"]) + + +def get_advanced_ptq_parameters(config_quantization_params: Dict[str, Any]) -> AdvancedQuantizationParameters: + range_estimator_params = get_range_init_type(config_quantization_params) + return AdvancedQuantizationParameters( + overflow_fix=convert_overflow_fix_param(config_quantization_params.get("overflow_fix")), + weights_quantization_params=convert_quantization_params(config_quantization_params.get("weights")), + activations_quantization_params=convert_quantization_params(config_quantization_params.get("activations")), + weights_range_estimator_params=range_estimator_params, + activations_range_estimator_params=range_estimator_params, + ) + + +def get_num_samples(config_quantization_params: Dict[str, Any]) -> int: + if ( + "initializer" in config_quantization_params + and "range" in config_quantization_params["initializer"] + and "num_init_samples" in config_quantization_params["initializer"]["range"] + ): + num_samples = config_quantization_params["initializer"]["range"]["num_init_samples"] + if isinstance(num_samples, int): + return num_samples + return 300 + + +def broadcast_initialized_parameters(quantized_model: NNCFNetwork): + for module in quantized_model.modules(): + if isinstance(module, BaseQuantizer): + module.broadcast_initialized_params() + + +def get_mocked_compression_ctrl(): + compression_ctrl = MagicMock() + compression_ctrl.loss = lambda: 0.0 + compression_ctrl.statistics = lambda *args, **kwargs: [] + return compression_ctrl + + +def start_worker_clean_memory(*args, **kwargs): + result = start_worker(*args, **kwargs) + gc.collect() + torch.cuda.empty_cache() + return result diff --git a/tests/torch/qat/test_qat_classification.py b/tests/torch/qat/test_qat_classification.py new file mode 100644 index 00000000000..394ebfa0071 --- /dev/null +++ b/tests/torch/qat/test_qat_classification.py @@ -0,0 +1,295 @@ +# Copyright (c) 2024 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from dataclasses import dataclass +from pathlib import Path +from typing import List + +import pytest +import torch +import torch.utils.data +import torch.utils.data.distributed +from helpers import broadcast_initialized_parameters +from helpers import get_advanced_ptq_parameters +from helpers import get_mocked_compression_ctrl +from helpers import get_num_samples +from helpers import get_quantization_preset +from helpers import start_worker_clean_memory +from torch import nn +from torch.optim.lr_scheduler import ReduceLROnPlateau + +import nncf +from examples.common.sample_config import SampleConfig +from examples.common.sample_config import create_sample_config +from examples.torch.classification.main import create_data_loaders +from examples.torch.classification.main import create_datasets +from examples.torch.classification.main import get_argument_parser +from examples.torch.classification.main import inception_criterion_fn +from examples.torch.classification.main import train_epoch +from examples.torch.classification.main import validate +from examples.torch.common.example_logger import logger +from examples.torch.common.execution import get_execution_mode +from examples.torch.common.execution import prepare_model_for_execution +from examples.torch.common.model_loader import load_model +from examples.torch.common.optimizer import get_parameter_groups +from examples.torch.common.optimizer import make_optimizer +from examples.torch.common.utils import configure_device +from examples.torch.common.utils import configure_logging +from examples.torch.common.utils import is_pretrained_model_requested +from nncf import NNCFConfig +from nncf.common.compression import BaseCompressionAlgorithmController +from nncf.torch.initialization import default_criterion_fn +from nncf.torch.utils import is_main_process +from tests.shared.paths import PROJECT_ROOT + +CONFIGS = list((PROJECT_ROOT / Path("examples/torch/classification/configs/quantization")).glob("*")) + + +def _get_filtered_quantization_configs() -> List[Path]: + configs = [] + for quantization_config_path in CONFIGS: + if "imagenet" not in quantization_config_path.stem: + # Test works only with imagenet models by far + continue + + nncf_config = NNCFConfig.from_json(quantization_config_path) + if "compression" not in nncf_config or nncf_config["compression"]["algorithm"] != "quantization": + # Config without compression + continue + + if "accuracy_aware_training" in nncf_config: + # Accuracy Aware training is not supported yet for QAT with PTQ. + continue + + if "pretrained" not in nncf_config or not nncf_config["pretrained"]: + # Test supports only pretrained models. 
+ continue + configs.append(quantization_config_path) + return configs + + +FILTERED_CONFIGS = _get_filtered_quantization_configs() + + +@pytest.fixture(name="quantization_config_path", params=FILTERED_CONFIGS, ids=[conf.stem for conf in FILTERED_CONFIGS]) +def fixture_quantization_config_path(request): + return request.param + + +def get_sample_config(quantization_config_path: Path, data_dir: str) -> SampleConfig: + parser = get_argument_parser() + data_dir = data_dir / "imagenet" + args = parser.parse_args(["-c", str(quantization_config_path), "--data", str(data_dir), "--dataset", "imagenet"]) + sample_config = create_sample_config(args, parser) + device = torch.device("cpu") + if torch.cuda.is_available(): + device = torch.device("cuda") + + sample_config.device = device + sample_config.execution_mode = get_execution_mode(sample_config) + return sample_config + + +@dataclass +class DatasetSet: + train_data_loader: torch.utils.data.DataLoader + val_data_loader: torch.utils.data.DataLoader + train_sampler: torch.utils.data.SequentialSampler + calibration_dataset: nncf.Dataset + + +def get_datasets(sample_config: SampleConfig) -> DatasetSet: + train_dataset, val_dataset = create_datasets(sample_config) + train_data_lodaer, train_sampler, val_data_loader, _ = create_data_loaders( + sample_config, train_dataset, val_dataset + ) + + def transform_fn(data_item): + return data_item[0].to(sample_config.device) + + val_data_loader_batch_one = torch.utils.data.DataLoader( + val_dataset, + batch_size=1, + shuffle=False, + num_workers=1, + ) + calibration_dataset = nncf.Dataset(val_data_loader_batch_one, transform_fn) + return DatasetSet( + train_data_loader=train_data_lodaer, + val_data_loader=val_data_loader, + train_sampler=train_sampler, + calibration_dataset=calibration_dataset, + ) + + +def accuracy_drop_is_acceptable(acc_drop: float) -> bool: + """ + Returns True in case acc_drop is less than 1 percent. + """ + return acc_drop < 1.0 + + +def get_optimizer_and_lr_scheduler(config: SampleConfig, model: torch.nn.Module): + params_to_optimize = get_parameter_groups(model, config) + optimizer, lr_scheduler = make_optimizer(params_to_optimize, config) + return optimizer, lr_scheduler + + +def train( + model: torch.nn.Module, + config: SampleConfig, + criterion: torch.nn.Module, + train_criterion_fn: callable, + datasets: DatasetSet, + original_accuracy: float, + compression_ctrl: BaseCompressionAlgorithmController, +) -> float: + """ + :return: Accuracy drop between original accuracy and trained quantized model accuracy. 
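+    Note: the model is validated before every epoch and the loop exits early as soon as
+    the accuracy drop is acceptable (less than 1 percent, see accuracy_drop_is_acceptable).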
+ """ + model, _ = prepare_model_for_execution(model, config) + if config.distributed: + broadcast_initialized_parameters(model) + + optimizer, lr_scheduler = get_optimizer_and_lr_scheduler(config, model) + + best_acc1 = 0 + logger.info("Quantization aware training pipeline starts.") + for epoch in range(config.start_epoch, config.epochs + 1): + current_accuracy, *_ = validate(datasets.val_data_loader, model, criterion, config, epoch - 1) + best_acc1 = max(current_accuracy, best_acc1) + acc_drop = original_accuracy - current_accuracy + logger.info(f"Metric: {current_accuracy}, FP32 diff: {acc_drop}") + if accuracy_drop_is_acceptable(acc_drop): + logger.info(f"Accuracy is within 1 percent drop, pipeline is making early exit on epoch {epoch - 1}") + logger.info( + f"Epochs in config: {config.epochs}, epochs trained: {epoch}, epochs saved: {config.epochs - epoch}" + ) + return acc_drop + if epoch == config.epochs: + logger.info("Training pipeline is finished, accuracy was not recovered.") + return acc_drop + + # update compression scheduler state at the begin of the epoch + if config.distributed: + datasets.train_sampler.set_epoch(epoch) + + # train for one epoch + train_epoch( + datasets.train_data_loader, model, criterion, train_criterion_fn, optimizer, compression_ctrl, epoch, config + ) + + # Learning rate scheduling should be applied after optimizer’s update + lr_scheduler.step(epoch if not isinstance(lr_scheduler, ReduceLROnPlateau) else best_acc1) + + +def check_training_correctness( + config: SampleConfig, + model: torch.nn.Module, + datasets: DatasetSet, + criterion: torch.nn.Module, + train_criterion_fn: callable, +): + """ + This function tries to run 50 training steps for one input and target pair and + checks loss decreases. This is needed to check model with compression could be + trained after the PTQ. 
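+    Note: the single calibration sample is duplicated to a batch of two so that the
+    batch normalization layers can work in training mode.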
+ """ + logger.info("Check model is trainable...") + steps_to_check = 50 + optimizer, _ = get_optimizer_and_lr_scheduler(config, model) + input_, target = next(iter(datasets.calibration_dataset.get_data())) + input_ = input_.to(config.device) + target = target.to(config.device) + # Make batch_size==2 to make batchnorms work + with torch.no_grad(): + input_ = torch.cat([input_, input_], dim=0) + target = torch.cat([target, target], dim=0) + loss_list = [] + model.train() + for _ in range(steps_to_check): + output = model(input_) + loss = train_criterion_fn(output, target, criterion) + loss_list.append(loss.item()) + optimizer.zero_grad() + loss.backward() + optimizer.step() + + assert loss_list[-1] < loss_list[0] + + +def main_worker(current_gpu: int, config: SampleConfig): + configure_device(current_gpu, config) + if is_main_process(): + configure_logging(logger, config) + else: + config.tb = None + + pretrained = is_pretrained_model_requested(config) + model_name = config["model"] + # create model + logger.info(f"\nCreating model from config: {config.config}") + model = load_model( + model_name, + pretrained=pretrained, + num_classes=config.get("num_classes", 1000), + model_params=config.get("model_params"), + weights_path=config.get("weights"), + ) + model.to(config.device) + + datasets = get_datasets(config) + criterion = nn.CrossEntropyLoss() + criterion = criterion.to(config.device) + + logger.info("Original model validation:") + original_accuracy, *_ = validate(datasets.val_data_loader, model, criterion, config) + + logger.info("Apply quantization to the model:") + config_quantization_params = config["compression"] + + preset = get_quantization_preset(config_quantization_params) + advanced_parameters = get_advanced_ptq_parameters(config_quantization_params) + subset_size = get_num_samples(config_quantization_params) + + quantized_model = nncf.quantize( + model, + datasets.calibration_dataset, + preset=preset, + advanced_parameters=advanced_parameters, + subset_size=subset_size, + ) + + train_criterion_fn = inception_criterion_fn if "inception" in model_name else default_criterion_fn + acc_drop = train( + quantized_model, + config, + criterion, + train_criterion_fn, + datasets, + original_accuracy, + get_mocked_compression_ctrl(), + ) + assert accuracy_drop_is_acceptable(acc_drop) + check_training_correctness(config, model, datasets, criterion, train_criterion_fn) + logger.info("Done!") + + +@pytest.mark.weekly +def test_compression_training(quantization_config_path: Path, sota_data_dir): + sample_config = get_sample_config(quantization_config_path, sota_data_dir) + if sample_config.model == "mobilenet_v3_small": + # Use default range initializer for mobilenet_v3_small + # as due to PTQ advantages it works better for the model. + del sample_config.nncf_config["compression"]["initializer"]["range"] + del sample_config["compression"]["initializer"]["range"] + + start_worker_clean_memory(main_worker, sample_config) diff --git a/tests/torch/qat/test_qat_object_detection.py b/tests/torch/qat/test_qat_object_detection.py new file mode 100644 index 00000000000..fde3a1a4c45 --- /dev/null +++ b/tests/torch/qat/test_qat_object_detection.py @@ -0,0 +1,318 @@ +# Copyright (c) 2024 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from dataclasses import dataclass +from pathlib import Path +from typing import List + +import pytest +import torch +import torch.utils.data +import torch.utils.data.distributed +from helpers import broadcast_initialized_parameters +from helpers import get_advanced_ptq_parameters +from helpers import get_mocked_compression_ctrl +from helpers import get_num_samples +from helpers import get_quantization_preset +from helpers import start_worker_clean_memory +from torch.optim.lr_scheduler import ReduceLROnPlateau + +import nncf +from examples.common.sample_config import SampleConfig +from examples.common.sample_config import create_sample_config +from examples.torch.common.example_logger import logger +from examples.torch.common.execution import get_execution_mode +from examples.torch.common.execution import prepare_model_for_execution +from examples.torch.common.optimizer import get_parameter_groups +from examples.torch.common.optimizer import make_optimizer +from examples.torch.common.utils import configure_device +from examples.torch.common.utils import configure_logging +from examples.torch.common.utils import is_on_first_rank +from examples.torch.object_detection.dataset import detection_collate +from examples.torch.object_detection.dataset import get_testing_dataset +from examples.torch.object_detection.eval import test_net as sample_validate +from examples.torch.object_detection.layers.modules import MultiBoxLoss +from examples.torch.object_detection.main import create_dataloaders +from examples.torch.object_detection.main import create_model +from examples.torch.object_detection.main import get_argument_parser +from examples.torch.object_detection.main import train_epoch +from nncf import NNCFConfig +from nncf.common.compression import BaseCompressionAlgorithmController +from tests.shared.paths import PROJECT_ROOT + +CONFIGS = list((PROJECT_ROOT / Path("examples/torch/object_detection/configs")).glob("*")) + + +def _get_filtered_quantization_configs() -> List[Path]: + configs = [] + for quantization_config_path in CONFIGS: + nncf_config = NNCFConfig.from_json(quantization_config_path) + if ( + "compression" not in nncf_config + or isinstance(nncf_config["compression"], list) + or nncf_config["compression"]["algorithm"] != "quantization" + ): + # Config without compression + continue + + if "accuracy_aware_training" in nncf_config: + # Accuracy Aware training is not supported yet for QAT with PTQ. 
+ continue + + configs.append(quantization_config_path) + return configs + + +FILTERED_CONFIGS = _get_filtered_quantization_configs() + + +@pytest.fixture(name="quantization_config_path", params=FILTERED_CONFIGS, ids=[conf.stem for conf in FILTERED_CONFIGS]) +def fixture_quantization_config_path(request): + return request.param + + +def get_sample_config(quantization_config_path: Path, data_dir: Path, weights_dir: Path) -> SampleConfig: + parser = get_argument_parser() + weights_path = weights_dir / "object_detection" / "voc" / (quantization_config_path.stem.split("_int8")[0] + ".pth") + args = parser.parse_args( + [ + "-c", + str(quantization_config_path), + "--data", + str(data_dir), + "--dataset", + "voc", + "--weights", + str(weights_path), + ] + ) + sample_config = create_sample_config(args, parser) + device = torch.device("cpu") + if torch.cuda.is_available(): + device = torch.device("cuda") + + sample_config.device = device + sample_config.execution_mode = get_execution_mode(sample_config) + + if sample_config.dataset_dir is not None: + sample_config.train_imgs = ( + sample_config.train_anno + ) = sample_config.test_imgs = sample_config.test_anno = sample_config.dataset_dir + return sample_config + + +@dataclass +class DatasetSet: + train_data_loader: torch.utils.data.DataLoader + test_data_loader: torch.utils.data.DataLoader + calibration_dataset: nncf.Dataset + + +def get_datasets(config: SampleConfig) -> DatasetSet: + test_data_loader, train_data_loader, _ = create_dataloaders(config) + + test_dataset = get_testing_dataset(config.dataset, config.test_anno, config.test_imgs, config) + logger.info("Loaded {} testing images".format(len(test_dataset))) + if config.distributed: + test_sampler = torch.utils.data.DistributedSampler(test_dataset, config.rank, config.world_size) + else: + test_sampler = torch.utils.data.SequentialSampler(test_dataset) + + def transform_fn(data_item): + return data_item[0].to(config.device) + + val_data_loader_batch_one = torch.utils.data.DataLoader( + test_dataset, + batch_size=1, + num_workers=config.workers, + shuffle=False, + collate_fn=detection_collate, + pin_memory=True, + drop_last=False, + sampler=test_sampler, + ) + + calibration_dataset = nncf.Dataset(val_data_loader_batch_one, transform_fn) + return DatasetSet( + train_data_loader=train_data_loader, + test_data_loader=test_data_loader, + calibration_dataset=calibration_dataset, + ) + + +def accuracy_drop_is_acceptable(acc_drop: float) -> bool: + """ + Returns True in case acc_drop is less than 1 percent. + """ + return acc_drop < 0.01 + + +def validate(net: torch.nn.Module, device, data_loader, distributed): + with torch.no_grad(): + net.eval() + return sample_validate(net, device, data_loader, distributed) + + +def get_optimizer_and_lr_scheduler(config: SampleConfig, model: torch.nn.Module): + params_to_optimize = get_parameter_groups(model, config) + optimizer, lr_scheduler = make_optimizer(params_to_optimize, config) + return optimizer, lr_scheduler + + +def train( + model: torch.nn.Module, + config: SampleConfig, + criterion: torch.nn.Module, + datasets: DatasetSet, + original_metric: float, + compression_ctrl: BaseCompressionAlgorithmController, +) -> float: + """ + :return: Accuracy drop between original accuracy and trained quantized model accuracy. 
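+    Note: the model is validated before every epoch and the loop exits early once the
+    metric drop is below 0.01.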
+ """ + model, _ = prepare_model_for_execution(model, config) + if config.distributed: + broadcast_initialized_parameters(model) + + optimizer, lr_scheduler = get_optimizer_and_lr_scheduler(config, model) + + best_metric = 0 + loc_loss = 0 + conf_loss = 0 + + epoch_size = len(datasets.train_data_loader) + logger.info("Quantization aware training pipeline starts.") + for epoch in range(config.start_epoch, config.epochs + 1): + current_metric = validate( + model, config.device, datasets.test_data_loader, distributed=config.multiprocessing_distributed + ) + best_metric = max(current_metric, best_metric) + acc_drop = original_metric - current_metric + logger.info(f"Metric: {current_metric}, FP32 diff: {acc_drop}") + if accuracy_drop_is_acceptable(acc_drop): + logger.info(f"Accuracy is within 1 percent drop," f" pipeline is making early exit on epoch {epoch - 1}") + logger.info( + f"Epochs in config: {config.epochs}, epochs trained: {epoch}, epochs saved: {config.epochs - epoch}" + ) + return acc_drop + if epoch == config.epochs: + logger.info("Training pipeline is finished, accuracy was not recovered.") + return acc_drop + + # update compression scheduler state at the begin of the epoch + if config.distributed: + datasets.train_sampler.set_epoch(epoch) + + # train for one epoch + model.train() + train_epoch( + compression_ctrl, + model, + config, + datasets.train_data_loader, + criterion, + optimizer, + epoch_size, + epoch, + loc_loss, + conf_loss, + ) + + # Learning rate scheduling should be applied after optimizer’s update + lr_scheduler.step(epoch if not isinstance(lr_scheduler, ReduceLROnPlateau) else best_metric) + + +def check_training_correctness( + config: SampleConfig, model: torch.nn.Module, datasets: DatasetSet, criterion: torch.nn.Module +): + """ + This function tries to run 50 training steps for one input and target pair and + checks loss decreases. This is needed to check model with compression could be + trained after the PTQ. 
+ """ + logger.info("Check model is trainable...") + steps_to_check = 50 + optimizer, _ = get_optimizer_and_lr_scheduler(config, model) + images, targets, *_ = next(iter(datasets.calibration_dataset.get_data())) + images = images.to(config.device) + targets = [t.to(config.device) for t in targets] + with torch.no_grad(): + images = torch.cat([images, images], dim=0) + targets.append(targets[0]) + loss_list = [] + model.train() + for _ in range(steps_to_check): + output = model(images) + loss_l, loss_c = criterion(output, targets) + loss = loss_l + loss_c + loss_list.append(loss.item()) + optimizer.zero_grad() + loss.backward() + optimizer.step() + + assert loss_list[-1] < loss_list[0] + + +def main_worker(current_gpu: int, config: SampleConfig): + configure_device(current_gpu, config) + if is_on_first_rank(config): + configure_logging(logger, config) + + # create model + logger.info(f"\nCreating model from config: {config.config}") + model = create_model(config) + + datasets = get_datasets(config) + criterion = MultiBoxLoss( + config, + config["num_classes"], + overlap_thresh=0.5, + prior_for_matching=True, + bkg_label=0, + neg_mining=True, + neg_pos=3, + neg_overlap=0.5, + encode_target=False, + device=config.device, + ) + criterion = criterion.to(config.device) + + logger.info("Original model validation:") + original_metric = validate(model, config.device, datasets.test_data_loader, config.distributed) + + logger.info("Apply quantization to the model:") + config_quantization_params = config["compression"] + + preset = get_quantization_preset(config_quantization_params) + advanced_parameters = get_advanced_ptq_parameters(config_quantization_params) + subset_size = get_num_samples(config_quantization_params) + + quantized_model = nncf.quantize( + model, + datasets.calibration_dataset, + preset=preset, + advanced_parameters=advanced_parameters, + subset_size=subset_size, + ) + if config.distributed: + config.batch_size //= config.ngpus_per_node + config.workers //= config.ngpus_per_node + + acc_drop = train(quantized_model, config, criterion, datasets, original_metric, get_mocked_compression_ctrl()) + assert accuracy_drop_is_acceptable(acc_drop) + check_training_correctness(config, model, datasets, criterion) + logger.info("Done!") + + +@pytest.mark.weekly +def test_compression_training(quantization_config_path: Path, sota_data_dir, sota_checkpoints_dir): + sample_config = get_sample_config(quantization_config_path, sota_data_dir, sota_checkpoints_dir) + start_worker_clean_memory(main_worker, sample_config) diff --git a/tests/torch/qat/test_qat_segmentation.py b/tests/torch/qat/test_qat_segmentation.py new file mode 100644 index 00000000000..f3dff7ef899 --- /dev/null +++ b/tests/torch/qat/test_qat_segmentation.py @@ -0,0 +1,340 @@ +# Copyright (c) 2024 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from dataclasses import dataclass +from pathlib import Path +from typing import List + +import pytest +import torch +import torch.utils.data +import torch.utils.data.distributed +from helpers import broadcast_initialized_parameters +from helpers import get_advanced_ptq_parameters +from helpers import get_mocked_compression_ctrl +from helpers import get_num_samples +from helpers import get_quantization_preset +from helpers import start_worker_clean_memory +from torch.optim.lr_scheduler import ReduceLROnPlateau + +import nncf +from examples.common.sample_config import SampleConfig +from examples.common.sample_config import create_sample_config +from examples.torch.common.example_logger import logger +from examples.torch.common.execution import get_execution_mode +from examples.torch.common.execution import prepare_model_for_execution +from examples.torch.common.model_loader import load_model +from examples.torch.common.optimizer import make_optimizer +from examples.torch.common.utils import configure_device +from examples.torch.common.utils import configure_logging +from examples.torch.common.utils import is_pretrained_model_requested +from examples.torch.semantic_segmentation.main import get_arguments_parser +from examples.torch.semantic_segmentation.main import get_criterion +from examples.torch.semantic_segmentation.main import get_dataset +from examples.torch.semantic_segmentation.main import get_joint_transforms +from examples.torch.semantic_segmentation.main import get_params_to_optimize +from examples.torch.semantic_segmentation.main import load_dataset +from examples.torch.semantic_segmentation.main import test as sample_validate +from examples.torch.semantic_segmentation.metric import IoU +from examples.torch.semantic_segmentation.test import Test +from examples.torch.semantic_segmentation.train import Train +from examples.torch.semantic_segmentation.utils.loss_funcs import do_model_specific_postprocessing +from nncf import NNCFConfig +from nncf.common.compression import BaseCompressionAlgorithmController +from nncf.torch.utils import is_main_process +from tests.shared.paths import PROJECT_ROOT + +CONFIGS = list((PROJECT_ROOT / Path("examples/torch/semantic_segmentation/configs")).glob("*")) + + +def _get_filtered_quantization_configs() -> List[Path]: + configs = [] + for quantization_config_path in CONFIGS: + nncf_config = NNCFConfig.from_json(quantization_config_path) + if ( + "compression" not in nncf_config + or isinstance(nncf_config["compression"], list) + or nncf_config["compression"]["algorithm"] != "quantization" + ): + # Config without compression + continue + + if "accuracy_aware_training" in nncf_config: + # Accuracy Aware training is not supported yet for QAT with PTQ. 
+ continue + configs.append(quantization_config_path) + return configs + + +FILTERED_CONFIGS = _get_filtered_quantization_configs() + + +@pytest.fixture(name="quantization_config_path", params=FILTERED_CONFIGS, ids=[conf.stem for conf in FILTERED_CONFIGS]) +def fixture_quantization_config_path(request): + return request.param + + +def get_sample_config(quantization_config_path: Path, data_dir: Path, weights_dir: Path) -> SampleConfig: + parser = get_arguments_parser() + meta = None + datasets_meta = [{"name": "mapillary", "dir_name": "mapillary_vistas"}, {"name": "camvid", "dir_name": "camvid"}] + for dataset_meta in datasets_meta: + if dataset_meta["name"] in quantization_config_path.stem: + meta = dataset_meta + break + else: + raise RuntimeError(f"Dataset for the config {str(quantization_config_path)} is unknown.") + + weights_path = ( + weights_dir / "segmentation" / meta["name"] / (quantization_config_path.stem.split("_int8")[0] + ".pth") + ) + data_dir = data_dir / meta["dir_name"] + args = parser.parse_args( + [ + "-c", + str(quantization_config_path), + "--data", + str(data_dir), + "--dataset", + meta["name"], + "--weights", + str(weights_path), + ] + ) + sample_config = create_sample_config(args, parser) + device = torch.device("cpu") + if torch.cuda.is_available(): + device = torch.device("cuda") + + sample_config.device = device + sample_config.execution_mode = get_execution_mode(sample_config) + return sample_config + + +@dataclass +class DatasetSet: + train_data_loader: torch.utils.data.DataLoader + val_data_loader: torch.utils.data.DataLoader + class_weights: object + calibration_dataset: nncf.Dataset + + +def get_datasets(dataset, config: SampleConfig) -> DatasetSet: + loaders, w_class = load_dataset(dataset, config) + train_loader, val_loader, _ = loaders + transforms_val = get_joint_transforms(is_train=False, config=config) + # Get selected dataset + val_dataset = dataset(config.dataset_dir, image_set="val", transforms=transforms_val) + + def transform_fn(data_item): + return data_item[0].to(config.device) + + val_data_loader_batch_one = torch.utils.data.DataLoader( + val_dataset, + batch_size=1, + shuffle=False, + num_workers=1, + ) + calibration_dataset = nncf.Dataset(val_data_loader_batch_one, transform_fn) + return DatasetSet( + train_data_loader=train_loader, + val_data_loader=val_loader, + class_weights=w_class, + calibration_dataset=calibration_dataset, + ) + + +def accuracy_drop_is_acceptable(acc_drop: float) -> bool: + """ + Returns True if acc_drop is less than 1 percent. + """ + return acc_drop < 0.01 + + +def get_optimizer_and_lr_scheduler(config: SampleConfig, model_without_dp: torch.nn.Module): + optim_config = config.get("optimizer", {}) + optim_params = optim_config.get("optimizer_params", {}) + lr = optim_params.get("lr", 1e-4) + + params_to_optimize = get_params_to_optimize(model_without_dp, lr * 10, config) + optimizer, lr_scheduler = make_optimizer(params_to_optimize, config) + return optimizer, lr_scheduler + + +def train( + model: torch.nn.Module, + model_without_dp: torch.nn.Module, + config: SampleConfig, + criterion: torch.nn.Module, + datasets: DatasetSet, + original_metric: float, + color_encoding: object, + compression_ctrl: BaseCompressionAlgorithmController, +) -> float: + """ + :return: Accuracy drop between original accuracy and trained quantized model accuracy. 
+ """ + logger.info("\nTraining...\n") + + optimizer, lr_scheduler = get_optimizer_and_lr_scheduler(config, model_without_dp) + + # Evaluation metric + + ignore_index = None + ignore_unlabeled = config.get("ignore_unlabeled", True) + if ignore_unlabeled and ("unlabeled" in color_encoding): + ignore_index = list(color_encoding).index("unlabeled") + + metric = IoU(len(color_encoding), ignore_index=ignore_index) + + best_miou = -1 + + # Start Training + train_obj = Train( + model, datasets.train_data_loader, optimizer, criterion, compression_ctrl, metric, config.device, config.model + ) + val_obj = Test(model, datasets.val_data_loader, criterion, metric, config.device, config.model) + + logger.info("Quantization aware training pipeline starts.") + for epoch in range(config.start_epoch, config.epochs): + if config.distributed: + datasets.train_data_loader.sampler.set_epoch(epoch) + + logger.info(">>>> [Epoch: {0:d}] Validation".format(epoch)) + _, (_, current_miou) = val_obj.run_epoch(config.print_step) + # best_metric = max(current_miou, best_metric) + acc_drop = original_metric - current_miou + best_miou = max(current_miou, best_miou) + logger.info(f"Metric: {current_miou}, FP32 diff: {acc_drop}") + if accuracy_drop_is_acceptable(acc_drop): + logger.info(f"Accuracy is within 1 percent drop," f" pipeline is making early exit on epoch {epoch - 1}") + logger.info( + f"Epochs in config: {config.epochs}, epochs trained: {epoch}, epochs saved: {config.epochs - epoch}" + ) + return acc_drop + if epoch == config.epochs: + logger.info("Training pipeline is finished, accuracy was not recovered.") + return acc_drop + + logger.info(">>>> [Epoch: {0:d}] Training".format(epoch)) + epoch_loss, (_, miou) = train_obj.run_epoch(config.print_step) + + logger.info(">>>> [Epoch: {0:d}] Avg. loss: {1:.4f} | Mean IoU: {2:.4f}".format(epoch, epoch_loss, miou)) + + lr_scheduler.step(epoch if not isinstance(lr_scheduler, ReduceLROnPlateau) else best_miou) + + +def check_training_correctness( + config: SampleConfig, + model: torch.nn.Module, + datasets: DatasetSet, + criterion: torch.nn.Module, +): + """ + This function tries to run 50 training steps for one input and target pair and + checks loss decreases. This is needed to check model with compression could be + trained after the PTQ. 
+ """ + logger.info("Check model is trainable...") + steps_to_check = 50 + model_without_dp = model + if hasattr(model_without_dp, "module"): + model_without_dp = model_without_dp.module + + optimizer, _ = get_optimizer_and_lr_scheduler(config, model_without_dp) + input_, labels, *_ = next(iter(datasets.calibration_dataset.get_data())) + input_ = input_.to(config.device) + labels = labels.to(config.device) + # Make batch_size==2 to make batchnorms work + with torch.no_grad(): + input_ = torch.cat([input_, input_], dim=0) + labels = torch.cat([labels, labels], dim=0) + loss_list = [] + model.train() + for _ in range(steps_to_check): + outputs = model(input_) + labels, loss_outputs, _ = do_model_specific_postprocessing(config.model, labels, outputs) + + # Loss computation + loss = criterion(loss_outputs, labels) + loss_list.append(loss.item()) + optimizer.zero_grad() + loss.backward() + optimizer.step() + + assert loss_list[-1] < loss_list[0] + + +def main_worker(current_gpu: int, config: SampleConfig): + configure_device(current_gpu, config) + if is_main_process(): + configure_logging(logger, config) + + # create model + logger.info(f"\nCreating model from config: {config.config}") + + dataset = get_dataset(config.dataset) + color_encoding = dataset.color_encoding + num_classes = len(color_encoding) + + pretrained = is_pretrained_model_requested(config) + model = load_model( + config.model, + pretrained=pretrained, + num_classes=num_classes, + model_params=config.get("model_params", {}), + weights_path=config.get("weights"), + ) + model.to(config.device) + + datasets = get_datasets(dataset, config) + criterion = get_criterion(datasets.class_weights, config) + + logger.info("Original model validation:") + original_metric = sample_validate(model, datasets.val_data_loader, criterion, color_encoding, config) + + logger.info("Apply quantization to the model:") + config_quantization_params = config["compression"] + + preset = get_quantization_preset(config_quantization_params) + advanced_parameters = get_advanced_ptq_parameters(config_quantization_params) + subset_size = get_num_samples(config_quantization_params) + + quantized_model = nncf.quantize( + model, + datasets.calibration_dataset, + preset=preset, + advanced_parameters=advanced_parameters, + subset_size=subset_size, + ) + model, model_without_dp = prepare_model_for_execution(model, config) + if config.distributed: + broadcast_initialized_parameters(model) + + acc_drop = train( + quantized_model, + model_without_dp, + config, + criterion, + datasets, + original_metric, + color_encoding, + get_mocked_compression_ctrl(), + ) + assert accuracy_drop_is_acceptable(acc_drop) + check_training_correctness(config, quantized_model, datasets, criterion) + logger.info("Done!") + + +@pytest.mark.weekly +def test_compression_training(quantization_config_path: Path, sota_data_dir, sota_checkpoints_dir): + sample_config = get_sample_config(quantization_config_path, sota_data_dir, sota_checkpoints_dir) + start_worker_clean_memory(main_worker, sample_config) diff --git a/tests/torch/quantization/quantization_helpers.py b/tests/torch/quantization/quantization_helpers.py index 16f2c83e7c7..45e0779e420 100644 --- a/tests/torch/quantization/quantization_helpers.py +++ b/tests/torch/quantization/quantization_helpers.py @@ -63,7 +63,7 @@ def get_squeezenet_quantization_config(image_size=32, batch_size=3): def distributed_init_test_default(gpu, ngpus_per_node, config): config.batch_size = 3 - config.workers = 0 # workaround for the pytorch 
multiprocessingdataloader issue/ + config.workers = 0 # workaround for the pytorch multiprocessingdataloader issue/ config.gpu = gpu config.ngpus_per_node = ngpus_per_node config.rank = gpu diff --git a/tests/torch/quantization/test_algo_quantization.py b/tests/torch/quantization/test_algo_quantization.py index c0d28bcef39..4a70ceebee2 100644 --- a/tests/torch/quantization/test_algo_quantization.py +++ b/tests/torch/quantization/test_algo_quantization.py @@ -36,11 +36,11 @@ from nncf.torch.compression_method_api import PTCompressionLoss from nncf.torch.dynamic_graph.scope import Scope from nncf.torch.dynamic_graph.scope import ScopeElement +from nncf.torch.graph.transformations.commands import ExtraCompressionModuleType from nncf.torch.layers import NNCFConv2d from nncf.torch.model_creation import create_compression_algorithm_builder from nncf.torch.module_operations import UpdateInputs from nncf.torch.module_operations import UpdateWeight -from nncf.torch.nncf_network import ExtraCompressionModuleType from nncf.torch.quantization.algo import QuantizationBuilder from nncf.torch.quantization.algo import QuantizationController from nncf.torch.quantization.layers import QUANTIZATION_MODULES diff --git a/tests/torch/quantization/test_strip.py b/tests/torch/quantization/test_strip.py index 3454c839e08..1c7105f2b91 100644 --- a/tests/torch/quantization/test_strip.py +++ b/tests/torch/quantization/test_strip.py @@ -22,7 +22,7 @@ from nncf.common.quantization.quantizers import get_num_levels from nncf.common.quantization.structs import QuantizationScheme as QuantizationMode from nncf.config import NNCFConfig -from nncf.torch.nncf_network import ExtraCompressionModuleType +from nncf.torch.graph.transformations.commands import ExtraCompressionModuleType from nncf.torch.quantization.layers import AsymmetricQuantizer from nncf.torch.quantization.layers import PTQuantizerSpec from nncf.torch.quantization.layers import SymmetricQuantizer diff --git a/tests/torch/sota_checkpoints_eval.json b/tests/torch/sota_checkpoints_eval.json index 90c1faf0696..c736f6c521d 100644 --- a/tests/torch/sota_checkpoints_eval.json +++ b/tests/torch/sota_checkpoints_eval.json @@ -12,7 +12,7 @@ "config": "examples/torch/classification/configs/quantization/resnet50_imagenet_int8.json", "reference": "resnet50_imagenet", "target_ov": 76.39, - "target_pt": 76.45, + "target_pt": 76.41, "metric_type": "Acc@1", "resume": "resnet50_imagenet_int8.pth", "model_description": "ResNet-50", @@ -24,19 +24,21 @@ "config": "examples/torch/classification/configs/quantization/resnet50_imagenet_int8_per_tensor.json", "reference": "resnet50_imagenet", "target_ov": 76.35, - "target_pt": 76.38, + "target_pt": 76.36, "metric_type": "Acc@1", "resume": "resnet50_imagenet_int8_per_tensor.pth", "model_description": "ResNet-50", "compression_description": "INT8 (per-tensor only)", "diff_fp32_min": -1, - "diff_fp32_max": 0.5 + "diff_fp32_max": 0.5, + "diff_target_pt_min": -0.15, + "diff_target_pt_max": 0.15 }, "resnet50_imagenet_int4_int8": { "config": "examples/torch/classification/configs/mixed_precision/resnet50_imagenet_mixed_int_hawq.json", "reference": "resnet50_imagenet", "target_ov": 75.61, - "target_pt": 75.86, + "target_pt": 75.94, "metric_type": "Acc@1", "resume": "resnet50_imagenet_int4_int8.pth", "model_description": "ResNet-50", @@ -48,7 +50,7 @@ "config": "examples/torch/classification/configs/sparsity_quantization/resnet50_imagenet_rb_sparsity_int8.json", "reference": "resnet50_imagenet", "target_ov": 75.39, - "target_pt": 75.42, + 
"target_pt": 75.41, "metric_type": "Acc@1", "resume": "resnet50_imagenet_rb_sparsity_int8.pth", "model_description": "ResNet-50", @@ -60,7 +62,7 @@ "config": "examples/torch/classification/configs/sparsity_quantization/resnet50_imagenet_rb_sparsity50_int8.json", "reference": "resnet50_imagenet", "target_ov": 75.44, - "target_pt": 75.47, + "target_pt": 75.5, "metric_type": "Acc@1", "resume": "resnet50_imagenet_rb_sparsity50_int8.pth", "model_description": "ResNet-50", @@ -90,7 +92,7 @@ "config": "examples/torch/classification/configs/quantization/inception_v3_imagenet_int8.json", "reference": "inception_v3_imagenet", "target_ov": 77.49, - "target_pt": 77.43, + "target_pt": 77.46, "metric_type": "Acc@1", "resume": "inception_v3_imagenet_int8.pth", "model_description": "Inception V3", @@ -103,7 +105,7 @@ "config": "examples/torch/classification/configs/sparsity_quantization/inception_v3_imagenet_rb_sparsity_int8.json", "reference": "inception_v3_imagenet", "target_ov": 76.34, - "target_pt": 76.32, + "target_pt": 76.34, "metric_type": "Acc@1", "resume": "inception_v3_imagenet_rb_sparsity_int8.pth", "model_description": "Inception V3", @@ -123,35 +125,35 @@ "config": "examples/torch/classification/configs/quantization/mobilenet_v2_imagenet_int8.json", "reference": "mobilenet_v2_imagenet", "target_ov": 71.01, - "target_pt": 71.24, + "target_pt": 71.22, "metric_type": "Acc@1", "resume": "mobilenet_v2_imagenet_int8.pth", "model_description": "MobileNet V2", "compression_description": "INT8", "diff_fp32_min": -1, "diff_fp32_max": 0.15, - "diff_target_pt_min": -0.3, - "diff_target_pt_max": 0.3 + "diff_target_pt_min": -0.2, + "diff_target_pt_max": 0.2 }, "mobilenet_v2_imagenet_int8_per_tensor": { "config": "examples/torch/classification/configs/quantization/mobilenet_v2_imagenet_int8_per_tensor.json", "reference": "mobilenet_v2_imagenet", "target_ov": 71.17, - "target_pt": 71.28, + "target_pt": 71.26, "metric_type": "Acc@1", "resume": "mobilenet_v2_imagenet_int8_per_tensor.pth", "model_description": "MobileNet V2", "compression_description": "INT8 (per-tensor only)", "diff_fp32_min": -1, "diff_fp32_max": 0.15, - "diff_target_pt_min": -0.3, - "diff_target_pt_max": 0.3 + "diff_target_pt_min": -0.2, + "diff_target_pt_max": 0.2 }, "mobilenet_v2_imagenet_int4_int8": { "config": "examples/torch/classification/configs/mixed_precision/mobilenet_v2_imagenet_mixed_int_hawq.json", "reference": "mobilenet_v2_imagenet", "target_ov": 70.52, - "target_pt": 70.57, + "target_pt": 70.68, "metric_type": "Acc@1", "resume": "mobilenet_v2_imagenet_int4_int8.pth", "model_description": "MobileNet V2", @@ -165,7 +167,7 @@ "config": "examples/torch/classification/configs/sparsity_quantization/mobilenet_v2_imagenet_rb_sparsity_int8.json", "reference": "mobilenet_v2_imagenet", "target_ov": 71.07, - "target_pt": 71.02, + "target_pt": 71.06, "metric_type": "Acc@1", "resume": "mobilenet_v2_imagenet_rb_sparsity_int8.pth", "model_description": "MobileNet V2", @@ -186,13 +188,15 @@ "config": "examples/torch/classification/configs/quantization/mobilenet_v3_small_imagenet_int8.json", "reference": "mobilenet_v3_small_imagenet", "target_ov": 66.92, - "target_pt": 66.97, + "target_pt": 66.87, "metric_type": "Acc@1", "resume": "mobilenet_v3_small_imagenet_int8.pth", "model_description": "MobileNet V3 small", "compression_description": "INT8", "diff_fp32_min": -1, - "diff_fp32_max": 0.15 + "diff_fp32_max": 0.15, + "diff_target_pt_min": -0.2, + "diff_target_pt_max": 0.2 }, "squeezenet1_1_imagenet": { "config": 
"examples/torch/classification/configs/quantization/squeezenet1_1_imagenet.json", @@ -205,39 +209,43 @@ "config": "examples/torch/classification/configs/quantization/squeezenet1_1_imagenet_int8.json", "reference": "squeezenet1_1_imagenet", "target_ov": 58.15, - "target_pt": 58.3, + "target_pt": 58.28, "metric_type": "Acc@1", "resume": "squeezenet1_1_imagenet_int8.pth", "model_description": "SqueezeNet V1.1", "compression_description": "INT8", "diff_fp32_min": -1, - "diff_fp32_max": 0.15 + "diff_fp32_max": 0.15, + "diff_target_pt_min": -0.2, + "diff_target_pt_max": 0.2 }, "squeezenet1_1_imagenet_int8_per_tensor": { "config": "examples/torch/classification/configs/quantization/squeezenet1_1_imagenet_int8_per_tensor.json", "reference": "squeezenet1_1_imagenet", "target_ov": 58.06, - "target_pt": 58.15, + "target_pt": 58.14, "metric_type": "Acc@1", "resume": "squeezenet1_1_imagenet_int8_per_tensor.pth", "model_description": "SqueezeNet V1.1", "compression_description": "INT8 (per-tensor only)", "diff_fp32_min": -1, - "diff_fp32_max": 0.15 + "diff_fp32_max": 0.15, + "diff_target_pt_min": -0.2, + "diff_target_pt_max": 0.2 }, "squeezenet1_1_imagenet_int4_int8": { "config": "examples/torch/classification/configs/mixed_precision/squeezenet1_1_imagenet_mixed_int_hawq_old_eval.json", "reference": "squeezenet1_1_imagenet", "target_ov": 57.53, - "target_pt": 57.59, + "target_pt": 57.66, "metric_type": "Acc@1", "resume": "squeezenet1_1_imagenet_int4_int8.pth", "model_description": "SqueezeNet V1.1", "compression_description": "Mixed, 52.83% INT8 / 47.17% INT4", "diff_fp32_min": -0.7, "diff_fp32_max": 0.7, - "diff_target_pt_min": -0.3, - "diff_target_pt_max": 0.3 + "diff_target_pt_min": -0.2, + "diff_target_pt_max": 0.2 }, "resnet18_imagenet": { "config": "examples/torch/classification/configs/pruning/resnet18_imagenet.json", @@ -317,7 +325,7 @@ "config": "examples/torch/object_detection/configs/ssd300_mobilenet_voc_magnitude_int8.json", "reference": "ssd300_mobilenet_voc", "target_ov": 63.01, - "target_pt": 62.97, + "target_pt": 62.99, "metric_type": "Mean AP", "resume": "ssd300_mobilenet_voc_magnitude_sparsity_int8.pth", "model_description": "SSD300-MobileNet", @@ -338,19 +346,21 @@ "config": "examples/torch/object_detection/configs/ssd300_vgg_voc_int8.json", "reference": "ssd300_vgg_voc", "target_ov": 77.94, - "target_pt": 77.89, + "target_pt": 77.9, "metric_type": "Mean AP", "resume": "ssd300_vgg_voc_int8.pth", "model_description": "SSD300-VGG-BN", "compression_description": "INT8", "diff_fp32_min": -1, - "diff_fp32_max": 0.1 + "diff_fp32_max": 0.1, + "diff_target_pt_min": -0.2, + "diff_target_pt_max": 0.2 }, "ssd300_vgg_voc_magnitude_sparsity_int8": { "config": "examples/torch/object_detection/configs/ssd300_vgg_voc_magnitude_sparsity_int8.json", "reference": "ssd300_vgg_voc", "target_ov": 77.46, - "target_pt": 77.67, + "target_pt": 77.66, "metric_type": "Mean AP", "resume": "ssd300_vgg_voc_magnitude_sparsity_int8.pth", "model_description": "SSD300-VGG-BN", @@ -382,20 +392,22 @@ "config": "examples/torch/object_detection/configs/ssd512_vgg_voc_int8.json", "reference": "ssd512_vgg_voc", "target_ov": 80.19, - "target_pt": 80.09, + "target_pt": 80.11, "metric_type": "Mean AP", "resume": "ssd512_vgg_voc_int8.pth", "batch": 32, "model_description": "SSD512-VGG-BN", "compression_description": "INT8", "diff_fp32_min": -1, - "diff_fp32_max": 0.2 + "diff_fp32_max": 0.2, + "diff_target_pt_min": -0.15, + "diff_target_pt_max": 0.15 }, "ssd512_vgg_voc_magnitude_sparsity_int8": { "config": 
"examples/torch/object_detection/configs/ssd512_vgg_voc_magnitude_sparsity_int8.json", "reference": "ssd512_vgg_voc", "target_ov": 79.98, - "target_pt": 79.76, + "target_pt": 79.7, "metric_type": "Mean AP", "resume": "ssd512_vgg_voc_magnitude_sparsity_int8.pth", "batch": 32, @@ -456,7 +468,7 @@ "config": "examples/torch/semantic_segmentation/configs/icnet_camvid_int8.json", "reference": "icnet_camvid", "target_ov": 67.89, - "target_pt": 67.86, + "target_pt": 67.87, "metric_type": "Mean IoU", "resume": "icnet_camvid_int8.pth", "model_description": "ICNet", @@ -469,7 +481,7 @@ "config": "examples/torch/semantic_segmentation/configs/icnet_camvid_magnitude_sparsity_int8.json", "reference": "icnet_camvid", "target_ov": 67.16, - "target_pt": 67.17, + "target_pt": 67.16, "metric_type": "Mean IoU", "resume": "icnet_camvid_magnitude_sparsity_int8.pth", "model_description": "ICNet", diff --git a/tests/torch/sparsity/magnitude/test_algo.py b/tests/torch/sparsity/magnitude/test_algo.py index 0dc2da03aa0..f6f16d979c3 100644 --- a/tests/torch/sparsity/magnitude/test_algo.py +++ b/tests/torch/sparsity/magnitude/test_algo.py @@ -46,18 +46,15 @@ def test_can_create_magnitude_sparse_algo__with_defaults(): _, sparse_model_conv = check_correct_nncf_modules_replacement(model, sparse_model) - i = 0 - nncf_stats = compression_ctrl.statistics() for layer_info in nncf_stats.magnitude_sparsity.thresholds: assert layer_info.threshold == approx(0.24, 0.1) assert isinstance(compression_ctrl._weight_importance_fn, type(normed_magnitude)) - for sparse_module in sparse_model_conv.values(): + for i, sparse_module in enumerate(sparse_model_conv.values()): store = [] ref_mask = torch.ones_like(sparse_module.weight) if i == 0 else ref_mask_2 - i += 1 for op in sparse_module.pre_ops.values(): if isinstance(op, UpdateWeight) and isinstance(op.operand, BinaryMask): assert torch.allclose(op.operand.binary_mask, ref_mask) diff --git a/tests/torch/test_extractor.py b/tests/torch/test_extractor.py index b9ba7858d66..e592e6491d8 100644 --- a/tests/torch/test_extractor.py +++ b/tests/torch/test_extractor.py @@ -17,7 +17,7 @@ from nncf.common.graph.transformations.commands import TargetType from nncf.torch import wrap_model from nncf.torch.extractor import extract_model -from nncf.torch.graph.transformations.commands import PTQuantizerInsertionCommand +from nncf.torch.graph.transformations.command_creation import create_quantizer_insertion_command from nncf.torch.graph.transformations.commands import PTTargetPoint from nncf.torch.model_transformer import PTModelTransformer from nncf.torch.model_transformer import PTTransformationLayout @@ -97,7 +97,7 @@ def test_extract_model(model_cls, input_node_name, output_node_name): ), ), ) -def tes_extract_model_for_node_with_fq(model_cls, input_node_name, output_node_name): +def test_extract_model_for_node_with_fq(model_cls, input_node_name, output_node_name): example_input = torch.ones(model_cls.INPUT_SIZE) model = wrap_model(model_cls().eval(), example_input=example_input, trace_parameters=True) @@ -114,7 +114,7 @@ def tes_extract_model_for_node_with_fq(model_cls, input_node_name, output_node_n ) fq = SymmetricQuantizer(qspec) - command = PTQuantizerInsertionCommand( + command = create_quantizer_insertion_command( PTTargetPoint(TargetType.OPERATOR_PRE_HOOK, input_node_name, input_port_id=1), fq ) layout = PTTransformationLayout() @@ -125,9 +125,10 @@ def tes_extract_model_for_node_with_fq(model_cls, input_node_name, output_node_n with torch.no_grad(): ret1 = q_model(example_input) ret2 = 
extracted_module(example_input) - assert torch.any(torch.isclose(ret1, ret2)) + assert torch.all(torch.isclose(ret1, ret2)) + + extracted_fn = extracted_module + if isinstance(extracted_fn, nn.Sequential): + extracted_fn = extracted_module[0] - if isinstance(extracted_module, nn.Sequential): - assert extracted_module[0].w_fq is not None - else: - assert extracted_module.w_fq is not None + assert extracted_fn.fn_name is not None diff --git a/tests/torch/test_layer_attributes.py b/tests/torch/test_layer_attributes.py index bfcae281ef3..a2d82141a9b 100644 --- a/tests/torch/test_layer_attributes.py +++ b/tests/torch/test_layer_attributes.py @@ -31,6 +31,7 @@ from nncf.torch.dynamic_graph.io_handling import FillerInputElement from nncf.torch.dynamic_graph.io_handling import FillerInputInfo from nncf.torch.dynamic_graph.io_handling import ModelInputInfo +from nncf.torch.dynamic_graph.layer_attributes_handlers import apply_args_defaults from nncf.torch.graph.graph_builder import GraphBuilder from nncf.torch.graph.operator_metatypes import PTBatchNormMetatype from nncf.torch.graph.operator_metatypes import PTCatMetatype @@ -549,3 +550,31 @@ def test_can_set_valid_layer_attributes_wrap_model(desc: LayerAttributesTestDesc RefNodeDesc(node.metatype, node.layer_attributes) for node in graph.get_nodes_by_metatypes([desc.metatype_cls]) ] assert ref_values == actual_values + + +@pytest.mark.parametrize( + "signature, args, kwargs", + ( + (["a", "b"], [1, 2], {}), + (["a", "b"], [], {"a": 1, "b": 2}), + (["a", "b"], [1], {"b": 2}), + (["a", ("b", 2)], [1], {"b": 2}), + ([("a", 1), ("b", 2)], [], {"b": 2}), + ([("a", 1), ("b", 2)], [], {}), + ), +) +def test_apply_args_defaults(signature, args, kwargs): + ret = apply_args_defaults(args, kwargs, signature) + assert ret == {"a": 1, "b": 2} + + +@pytest.mark.parametrize( + "signature, args, kwargs", + ( + (["a", "b"], [1], {}), + ([1, 2], [], {}), + ), +) +def test_apply_args_defaults_errors(signature, args, kwargs): + with pytest.raises(ValueError): + apply_args_defaults(args, kwargs, signature) diff --git a/tests/torch/test_model_graph_manager.py b/tests/torch/test_model_graph_manager.py index 89c21c4b883..f1d9d743591 100644 --- a/tests/torch/test_model_graph_manager.py +++ b/tests/torch/test_model_graph_manager.py @@ -21,7 +21,7 @@ from nncf.common.graph.graph import NNCFNode from nncf.common.graph.transformations.commands import TargetType from nncf.torch import wrap_model -from nncf.torch.graph.transformations.commands import PTQuantizerInsertionCommand +from nncf.torch.graph.transformations.command_creation import create_quantizer_insertion_command from nncf.torch.graph.transformations.commands import PTTargetPoint from nncf.torch.model_graph_manager import get_const_data from nncf.torch.model_graph_manager import get_const_data_on_port @@ -268,7 +268,7 @@ def test_get_fake_quantizer(target_type, port_id): ) fq = SymmetricQuantizer(qspec) - command = PTQuantizerInsertionCommand(PTTargetPoint(target_type, node_name, input_port_id=port_id), fq) + command = create_quantizer_insertion_command(PTTargetPoint(target_type, node_name, input_port_id=port_id), fq) layout = PTTransformationLayout() layout.register(command) q_model = transformer.transform(layout) @@ -303,7 +303,9 @@ def test_is_quantized_weights(): ) fq = SymmetricQuantizer(qspec) - command = PTQuantizerInsertionCommand(PTTargetPoint(TargetType.OPERATOR_PRE_HOOK, node_name, input_port_id=1), fq) + command = create_quantizer_insertion_command( + PTTargetPoint(TargetType.OPERATOR_PRE_HOOK, node_name, 
input_port_id=1), fq + ) layout = PTTransformationLayout() layout.register(command) q_model = transformer.transform(layout) diff --git a/tests/torch/test_model_transformer.py b/tests/torch/test_model_transformer.py index a8b2172b9c2..c554a39ccb3 100644 --- a/tests/torch/test_model_transformer.py +++ b/tests/torch/test_model_transformer.py @@ -31,6 +31,7 @@ from nncf.common.insertion_point_graph import InsertionPointGraphNodeType from nncf.common.insertion_point_graph import PostHookInsertionPoint from nncf.common.insertion_point_graph import PreHookInsertionPoint +from nncf.common.quantization.structs import NonWeightQuantizerId from nncf.common.quantization.structs import QuantizationScheme as QuantizationMode from nncf.common.utils.backend import BackendType from nncf.common.utils.dot_file_rw import get_graph_without_data @@ -42,17 +43,18 @@ from nncf.torch.dynamic_graph.io_handling import FillerInputInfo from nncf.torch.dynamic_graph.operation_address import OperationAddress from nncf.torch.dynamic_graph.patch_pytorch import register_operator -from nncf.torch.external_hook import EXTERNAL_OP_STORAGE_NAME from nncf.torch.external_hook import ExternalOpCallHook from nncf.torch.graph.operator_metatypes import PTConv2dMetatype from nncf.torch.graph.operator_metatypes import PTInputNoopMetatype from nncf.torch.graph.operator_metatypes import PTModuleConv2dMetatype from nncf.torch.graph.operator_metatypes import PTOutputNoopMetatype from nncf.torch.graph.operator_metatypes import PTReshapeMetatype +from nncf.torch.graph.transformations.command_creation import create_quantizer_insertion_command +from nncf.torch.graph.transformations.command_creation import create_shared_quantizer_insertion_command +from nncf.torch.graph.transformations.commands import ExtraCompressionModuleType from nncf.torch.graph.transformations.commands import PTBiasCorrectionCommand from nncf.torch.graph.transformations.commands import PTInsertionCommand from nncf.torch.graph.transformations.commands import PTModelExtractionWithFusedBiasCommand -from nncf.torch.graph.transformations.commands import PTQuantizerInsertionCommand from nncf.torch.graph.transformations.commands import PTSharedFnInsertionCommand from nncf.torch.graph.transformations.commands import PTTargetPoint from nncf.torch.graph.transformations.commands import PTWeightUpdateCommand @@ -62,12 +64,13 @@ from nncf.torch.model_transformer import PTModelTransformer from nncf.torch.module_operations import BaseOp from nncf.torch.module_operations import UpdateWeight -from nncf.torch.nncf_network import ExtraCompressionModuleType from nncf.torch.nncf_network import NNCFNetwork from nncf.torch.nncf_network import PTInsertionPoint from nncf.torch.nncf_network import PTInsertionType +from nncf.torch.nncf_network import compression_module_type_to_attr_name from nncf.torch.quantization.layers import AsymmetricQuantizer from nncf.torch.quantization.layers import PTQuantizerSpec +from nncf.torch.utils import get_model_device from tests.common.quantization.mock_graphs import get_ip_graph_for_test from tests.common.quantization.mock_graphs import get_mock_model_graph_with_broken_output_edge_pattern from tests.common.quantization.mock_graphs import get_mock_model_graph_with_mergeable_pattern @@ -157,7 +160,10 @@ def test_single_insertions(self, trace_parameters, target_point: PTTargetPoint): not trace_parameters, ) if insertion_point.insertion_type in [PTInsertionType.OPERATOR_PRE_HOOK, PTInsertionType.OPERATOR_POST_HOOK]: - hook = lambda x: x + + def hook(x): + return x + 
else: hook = BaseOp(lambda x: x) @@ -183,6 +189,75 @@ def test_single_insertions(self, trace_parameters, target_point: PTTargetPoint): assert len(model.nncf._groups_vs_hooks_handlers[test_hook_group]) == 1 + class BaseOpWithParam(BaseOp): + def __init__(self, op): + super().__init__(op) + self.param1 = torch.nn.Parameter(torch.zeros((1,))) + self.param2 = torch.nn.Parameter(torch.zeros((1,))) + self.to_device = None + + def to(self, device): + super().to(device) + self.to_device = device + + @pytest.mark.parametrize("target_point", available_points) + @pytest.mark.parametrize("multidevice", (False, True)) + @pytest.mark.parametrize("hook", (lambda x: x, BaseOpWithParam(lambda x: x).cpu())) + def test_pt_insertion_command(self, target_point: PTTargetPoint, multidevice: bool, hook): + model = wrap_model(InsertionPointTestModel(), torch.ones([1, 1, 10, 10])) + + if multidevice: + if not torch.cuda.is_available(): + pytest.skip("Cuda is not available, could not run multidevice test case") + model.conv2.to("cuda") + + test_hook_group = "test_hook_group" + insertion_command = PTInsertionCommand(target_point, hook, hooks_group_name=test_hook_group) + layout = PTTransformationLayout() + layout.register(insertion_command) + transformer = PTModelTransformer(model) + + if target_point.target_type in [ + TargetType.PRE_LAYER_OPERATION, + TargetType.POST_LAYER_OPERATION, + ] and not isinstance(hook, nn.Module): + with pytest.raises(TypeError): + transformer.transform(layout) + return + transformer.transform(layout) + + insertion_point = PTInsertionPoint( + target_point.target_type, + model.nncf.get_node_to_op_address_mapping()[target_point.target_node_name], + target_point.input_port_id, + ) + + if target_point.target_type == TargetType.OPERATOR_PRE_HOOK: + ctx = model.nncf.get_tracing_context() + pre_hook_id = PreHookId(insertion_point.op_address, input_port_id=insertion_point.input_port_id) + assert ctx._pre_hooks[pre_hook_id]["0"] is hook + elif target_point.target_type == TargetType.OPERATOR_POST_HOOK: + ctx = model.nncf.get_tracing_context() + assert ctx._post_hooks[insertion_point.op_address]["0"] is hook + elif target_point.target_type == TargetType.OPERATION_WITH_WEIGHTS: + module = model.nncf.get_module_by_scope(insertion_point.module_scope) + w_hook = module.pre_ops["0"] + assert isinstance(w_hook, UpdateWeight) + assert w_hook.op is hook + elif target_point.target_type == TargetType.PRE_LAYER_OPERATION: + module = model.nncf.get_module_by_scope(insertion_point.module_scope) + assert module.pre_ops["0"] is hook + elif target_point.target_type == TargetType.POST_LAYER_OPERATION: + module = model.nncf.get_module_by_scope(insertion_point.module_scope) + assert module.post_ops["0"] is hook + else: + raise Exception(f"Not check order for {insertion_point.insertion_type}") + + if isinstance(hook, nn.Module) and not multidevice: + assert hook.to_device == get_model_device(model) + + assert len(model.nncf._groups_vs_hooks_handlers[test_hook_group]) == 1 + @staticmethod def check_order(iterable1: List, iterable2: List, ordering: List): for idx, order in enumerate(ordering): @@ -554,96 +629,104 @@ class Hook(torch.nn.Module): def __init__(self): super().__init__() self._param = torch.nn.Parameter(torch.zeros((1,))) + self.to_device = None def forward(self, x): return x + self._param + def to(self, device): + super().to(device) + self.to_device = device -@pytest.mark.parametrize( - "target_type, node_name, input_port_id, ref_name", - ( - (TargetType.OPERATOR_POST_HOOK, "/nncf_model_input_0", None, 
"/nncf_model_input_0|OUTPUT"), - ( - TargetType.OPERATOR_PRE_HOOK, - "InsertionPointTestModel/linear_0", - 0, - "InsertionPointTestModel/linear_0|INPUT0", - ), - (TargetType.OPERATION_WITH_WEIGHTS, "InsertionPointTestModel/NNCFConv2d[conv1]/conv2d_0", None, None), + +SHARED_FN_TARGET_POINTS = ( + PTTargetPoint( + TargetType.OPERATOR_POST_HOOK, + "/nncf_model_input_0", + ), + PTTargetPoint( + TargetType.OPERATOR_PRE_HOOK, + "InsertionPointTestModel/linear_0", + input_port_id=0, + ), + PTTargetPoint( + TargetType.OPERATION_WITH_WEIGHTS, + "InsertionPointTestModel/NNCFConv2d[conv1]/conv2d_0", ), ) -def test_quantizer_insertion_transformations(target_type, node_name, input_port_id, ref_name): - hook = Hook() - - def _insert_quantizer_to_model(): - model = NNCFNetwork(InsertionPointTestModel(), FillerInputInfo([FillerInputElement([1, 1, 10, 10])])) - model_transformer = PTModelTransformer(model) - - target_point = PTTargetPoint(target_type, node_name, input_port_id=input_port_id) - command = PTQuantizerInsertionCommand(target_point, hook) - transformation_layout = PTTransformationLayout() - transformation_layout.register(command) - return model_transformer.transform(transformation_layout) - transformed_model = _insert_quantizer_to_model() - - compression_module_type = ExtraCompressionModuleType.EXTERNAL_QUANTIZER - assert transformed_model.nncf.is_compression_module_registered(compression_module_type) - assert hook in transformed_model.modules() +@pytest.mark.parametrize("target_point", SHARED_FN_TARGET_POINTS) +def test_create_quantizer_insertion_command(target_point): + hook = Hook() + command = create_quantizer_insertion_command(target_point, hook) - if target_type == TargetType.OPERATION_WITH_WEIGHTS: - op = transformed_model.conv1.pre_ops._modules["0"] - assert isinstance(op, UpdateWeight) - assert isinstance(op.op, Hook) + assert command.fn is hook + if target_point.type is TargetType.OPERATION_WITH_WEIGHTS: + assert isinstance(command, PTInsertionCommand) else: - external_quantizers = transformed_model.nncf.get_compression_modules_by_type(compression_module_type) - assert hasattr(external_quantizers, ref_name) - op = getattr(external_quantizers, ref_name) - assert isinstance(op, Hook) + quantizer_id = NonWeightQuantizerId(target_point.target_node_name, target_point.input_port_id) + assert isinstance(command, PTSharedFnInsertionCommand) + assert command.target_points == [target_point] + assert command.fn is hook + storage_key = str(quantizer_id) + assert command.op_name == storage_key + assert command.compression_module_type is ExtraCompressionModuleType.EXTERNAL_QUANTIZER + + +def test_create_shared_quantizer_insertion_command(): + ref_storage_key = ( + "/nncf_model_input_0|OUTPUT;" + "InsertionPointTestModel/NNCFConv2d[conv1]/conv2d_0|OUTPUT;" + "InsertionPointTestModel/linear_0|INPUT0" + ) + hook = Hook() - # Check torch can correctly save and load model state dict with an external quantizer - state_dict = transformed_model.state_dict() - if target_type == TargetType.OPERATION_WITH_WEIGHTS: - state_dict_hook_key = "conv1.pre_ops.0.op._param" - else: - state_dict_hook_key = f"_nncf.external_quantizers.{ref_name}._param" - assert state_dict_hook_key in state_dict - del transformed_model - transformed_model = _insert_quantizer_to_model() - transformed_model.load_state_dict(state_dict) + command = create_shared_quantizer_insertion_command(list(SHARED_FN_TARGET_POINTS), hook) + assert command.fn is hook + assert isinstance(command, PTSharedFnInsertionCommand) + assert command.target_points 
== list(SHARED_FN_TARGET_POINTS) + assert command.fn is hook + assert command.op_name == ref_storage_key + assert command.compression_module_type is ExtraCompressionModuleType.EXTERNAL_QUANTIZER +@pytest.mark.parametrize("compression_module_type", ExtraCompressionModuleType) @pytest.mark.parametrize( "priority", [TransformationPriority.FP32_TENSOR_STATISTICS_OBSERVATION, TransformationPriority.DEFAULT_PRIORITY] ) @pytest.mark.parametrize("compression_module_registered", [False, True]) -def test_shared_fn_insertion_point(priority, compression_module_registered, mocker): - tps = [ - PTTargetPoint( - TargetType.OPERATOR_POST_HOOK, - "/nncf_model_input_0", - ), - PTTargetPoint( - TargetType.OPERATOR_PRE_HOOK, - "InsertionPointTestModel/linear_0", - input_port_id=0, - ), - PTTargetPoint( - TargetType.OPERATION_WITH_WEIGHTS, - "InsertionPointTestModel/NNCFConv2d[conv1]/conv2d_0", - ), - ] +@pytest.mark.parametrize("multidevice_model", (False, True)) +def test_shared_fn_insertion_point( + priority, compression_module_registered, compression_module_type, multidevice_model, mocker +): + if not torch.cuda.is_available() and multidevice_model: + pytest.skip("Could not test multidevice case without cuda") + + tps = SHARED_FN_TARGET_POINTS OP_UNIQUE_NAME = "UNIQUE_NAME" HOOK_GROUP_NAME = "shared_commands_hooks_group" + STORAGE_NAME = compression_module_type_to_attr_name(compression_module_type) hook_instance = Hook() def _insert_external_op_mocked(): model = NNCFNetwork(InsertionPointTestModel(), FillerInputInfo([FillerInputElement([1, 1, 10, 10])])) + model = model.cpu() + if multidevice_model: + model.conv1.to(torch.device("cpu")) + model.conv2.to(torch.device("cuda")) + if compression_module_registered: - model.nncf.register_compression_module_type(ExtraCompressionModuleType.EXTERNAL_OP) + model.nncf.register_compression_module_type(compression_module_type) unique_name = f"{OP_UNIQUE_NAME}[{';'.join([tp.target_node_name for tp in tps])}]" - command = PTSharedFnInsertionCommand(tps, hook_instance, unique_name, priority, HOOK_GROUP_NAME) + command = PTSharedFnInsertionCommand( + target_points=tps, + fn=hook_instance, + op_unique_name=unique_name, + compression_module_type=compression_module_type, + priority=priority, + hooks_group_name=HOOK_GROUP_NAME, + ) transformation_layout = PTTransformationLayout() transformation_layout.register(command) @@ -658,38 +741,126 @@ def _insert_external_op_mocked(): transformed_model = _insert_external_op_mocked() - assert transformed_model.nncf.is_compression_module_registered(ExtraCompressionModuleType.EXTERNAL_OP) + assert transformed_model.nncf.is_compression_module_registered(compression_module_type) REF_STORAGE_KEY = ( "UNIQUE_NAME[/nncf_model_input_0;InsertionPointTestModel/linear_0;" "InsertionPointTestModel/NNCFConv2d[conv1]/conv2d_0]" ) - storage = getattr(transformed_model.nncf, EXTERNAL_OP_STORAGE_NAME) + storage = getattr(transformed_model.nncf, STORAGE_NAME) assert storage[REF_STORAGE_KEY] is hook_instance assert hook_instance in transformed_model.modules() mock = PTModelTransformer._apply_insertion_transformations mock.assert_called_once() - _, commands = mock.call_args.args + _, commands, device = mock.call_args.args assert len(commands) == len(tps) for command in commands: assert command.target_point in tps assert command.hooks_group_name == HOOK_GROUP_NAME + assert command.priority == priority fn = command.fn assert isinstance(fn, ExternalOpCallHook) - assert fn._storage_name == EXTERNAL_OP_STORAGE_NAME + assert fn._storage_name == STORAGE_NAME 
assert fn._storage_key == REF_STORAGE_KEY + if multidevice_model: + assert hook_instance.to_device is None + assert device is None + else: + actual_model_device = get_model_device(transformed_model) + assert hook_instance.to_device == actual_model_device + assert device == actual_model_device + # Check torch can correctly save and load model state dict with an external quantizer state_dict = transformed_model.state_dict() - assert f"_nncf.{EXTERNAL_OP_STORAGE_NAME}.{REF_STORAGE_KEY}._param" in state_dict + assert f"_nncf.{STORAGE_NAME}.{REF_STORAGE_KEY}._param" in state_dict del transformed_model transformed_model = _insert_external_op_mocked() transformed_model.load_state_dict(state_dict) +@pytest.mark.parametrize( + "priority", [TransformationPriority.FP32_TENSOR_STATISTICS_OBSERVATION, TransformationPriority.DEFAULT_PRIORITY] +) +@pytest.mark.parametrize("compression_module_registered", [False, True]) +@pytest.mark.parametrize("multidevice_model", (False, True)) +def test_shared_fn_insertion_command_several_module_types( + priority, compression_module_registered, multidevice_model, mocker +): + if not torch.cuda.is_available() and multidevice_model: + pytest.skip("Could not test multidevice case without cuda") + + tps = SHARED_FN_TARGET_POINTS + OP_UNIQUE_NAME = "UNIQUE_NAME" + HOOK_GROUP_NAME = "shared_commands_hooks_group" + MODULE_TYPES = [t for t in ExtraCompressionModuleType] + hook_instance = Hook() + + def _insert_external_op_mocked(): + model = NNCFNetwork(InsertionPointTestModel(), FillerInputInfo([FillerInputElement([1, 1, 10, 10])])) + model = model.cpu() + if multidevice_model: + model.conv1.to(torch.device("cpu")) + model.conv2.to(torch.device("cuda")) + + transformation_layout = PTTransformationLayout() + for compression_module_type in MODULE_TYPES: + if compression_module_registered: + model.nncf.register_compression_module_type(compression_module_type) + unique_name = f"{OP_UNIQUE_NAME}[{';'.join([tp.target_node_name for tp in tps])}]" + command = PTSharedFnInsertionCommand( + target_points=tps, + fn=hook_instance, + op_unique_name=unique_name, + compression_module_type=compression_module_type, + priority=priority, + hooks_group_name=HOOK_GROUP_NAME, + ) + transformation_layout.register(command) + + mocker.MagicMock() + mocker.patch( + "nncf.torch.model_transformer.PTModelTransformer._apply_shared_node_insertion_with_compression_type", + return_value=mocker.MagicMock(), + ) + model_transformer = PTModelTransformer(model) + model_transformer.transform(transformation_layout=transformation_layout) + return model + + transformed_model = _insert_external_op_mocked() + + mock = PTModelTransformer._apply_shared_node_insertion_with_compression_type + assert len(mock.call_args_list) == len(MODULE_TYPES) + + REF_STORAGE_KEY = ( + "UNIQUE_NAME[/nncf_model_input_0;InsertionPointTestModel/linear_0;" + "InsertionPointTestModel/NNCFConv2d[conv1]/conv2d_0]" + ) + + module_types_set = set(MODULE_TYPES) + for (_, commands, device, compression_module_type), _ in mock.call_args_list: + module_types_set -= set((compression_module_type,)) + assert len(commands) == 1 + command = commands[0] + assert isinstance(command, PTSharedFnInsertionCommand) + assert command.fn is hook_instance + assert command.target_points is tps + assert command.compression_module_type == compression_module_type + assert command.op_name == REF_STORAGE_KEY + assert command.priority == priority + assert command.hooks_group_name == HOOK_GROUP_NAME + + if multidevice_model: + assert device is None + else: + assert device 
== get_model_device(transformed_model) + + assert not module_types_set + + INSERTION_POINT_TEST_MODEL_TARGET_POINTS = ( ( TargetType.OPERATOR_POST_HOOK, diff --git a/tests/torch/test_nncf_network.py b/tests/torch/test_nncf_network.py index 938eae5f73c..c4da4be8c82 100644 --- a/tests/torch/test_nncf_network.py +++ b/tests/torch/test_nncf_network.py @@ -39,11 +39,11 @@ from nncf.torch.graph.graph_builder import GraphBuilder from nncf.torch.graph.operator_metatypes import PTConv2dMetatype from nncf.torch.graph.operator_metatypes import PTModuleConv2dMetatype +from nncf.torch.graph.transformations.commands import ExtraCompressionModuleType from nncf.torch.layer_utils import _NNCFModuleMixin from nncf.torch.layers import NNCFConv2d from nncf.torch.model_creation import wrap_model from nncf.torch.nncf_module_replacement import replace_modules_by_nncf_modules -from nncf.torch.nncf_network import ExtraCompressionModuleType from nncf.torch.nncf_network import NNCFNetwork from nncf.torch.nncf_network import PTInsertionPoint from nncf.torch.nncf_network import PTInsertionType diff --git a/tests/torch/test_statistics_aggregator.py b/tests/torch/test_statistics_aggregator.py index 11e5ce70942..a5c8a71788e 100644 --- a/tests/torch/test_statistics_aggregator.py +++ b/tests/torch/test_statistics_aggregator.py @@ -168,6 +168,7 @@ def test_successive_statistics_aggregation( dataset_samples, inplace_statistics, is_backend_support_custom_estimators, + mocker, ): is_stat_in_shape_of_scale = True model = self.get_backend_model(dataset_samples) @@ -182,29 +183,29 @@ def test_successive_statistics_aggregation( if not is_standard_estimator and not is_backend_support_custom_estimators: pytest.skip("Custom estimators are not supported for this backend yet") - ### Register operations before statistic collection + # Register operations before statistic collection def fn(x): return x * 2 target_point = self.get_target_point(test_parameters.target_type) model = self.__add_fn_to_model(model, target_point, fn) - ### Check hook inserted correctly + # Check hook inserted correctly self.__check_successive_hooks(test_parameters, model, target_point, fn) - ### Register and collect statistics after inserted operations + # Register and collect statistics after inserted operations statistic_points = self.__get_statistic_points( - test_parameters, model, quantizer_config, dataset_samples, inplace_statistics + test_parameters, model, quantizer_config, dataset_samples, inplace_statistics, mocker ) tensor_collector = self.__collect_statistics_get_collector(statistic_points, model, dataset_samples) - ### Check values are changed because of the inserted operation + # Check values are changed because of the inserted operation self.__check_collector( test_parameters, tensor_collector, is_stat_in_shape_of_scale, ) - ### Check the inserted operation is inside the model + # Check the inserted operation is inside the model self.__check_successive_hooks(test_parameters, model, target_point, fn) @pytest.mark.parametrize( @@ -254,6 +255,7 @@ def test_nested_statistics_aggregation( dataset_samples, inplace_statistics, is_backend_support_custom_estimators, + mocker, ): is_stat_in_shape_of_scale = True model = self.get_backend_model(dataset_samples) @@ -268,7 +270,7 @@ def test_nested_statistics_aggregation( if not is_standard_estimator and not is_backend_support_custom_estimators: pytest.skip("Custom estimators are not supported for this backend yet") - ### Register operations before statistic collection + # Register operations before statistic 
collection @register_operator() def fn(x): return x * 2 @@ -278,26 +280,22 @@ def fn(x): nested_target_point = PTMinMaxAlgoBackend.target_point(nested_target_type, nested_target_node_name, 0) model = self.__add_fn_to_model(model, nested_target_point, fn) - ### Check hook inserted correctly + # Check hook inserted correctly self.__check_nested_hooks(test_parameters, model, target_point, nested_target_type, nested_target_node_name, fn) - ### Register and collect statistics after inserted operations + # Register and collect statistics after inserted operations statistic_points = self.__get_statistic_points( - test_parameters, - model, - quantizer_config, - dataset_samples, - inplace_statistics, + test_parameters, model, quantizer_config, dataset_samples, inplace_statistics, mocker ) tensor_collector = self.__collect_statistics_get_collector(statistic_points, model, dataset_samples) - ### Check values are changed because of the inserted operation + # Check values are changed because of the inserted operation self.__check_collector( test_parameters, tensor_collector, is_stat_in_shape_of_scale, ) - ### Check the inserted operation is inside the model + # Check the inserted operation is inside the model self.__check_nested_hooks(test_parameters, model, target_point, nested_target_type, nested_target_node_name, fn) @staticmethod @@ -312,7 +310,7 @@ def __add_fn_to_model(model, target_point, fn): @classmethod def __get_statistic_points( - cls, test_parameters: MinMaxTestParameters, model, quantizer_config, dataset_samples, inplace_statistics + cls, test_parameters: MinMaxTestParameters, model, quantizer_config, dataset_samples, inplace_statistics, mocker ) -> StatisticPointsContainer: statistics_points = StatisticPointsContainer() for target_type in [test_parameters.target_type]: @@ -325,6 +323,7 @@ def __get_statistic_points( "TEST_ALGO", inplace_statistics, test_parameters.range_estimator_params, + mocker, ) statistics_points.add_statistic_point(statistic_point) return statistics_points diff --git a/tests/torch/test_tracing_context.py b/tests/torch/test_tracing_context.py index 37642fad5f6..7c4a23a588a 100644 --- a/tests/torch/test_tracing_context.py +++ b/tests/torch/test_tracing_context.py @@ -17,7 +17,7 @@ from nncf.torch.dynamic_graph.trace_tensor import TracedParameter from nncf.torch.dynamic_graph.trace_tensor import TracedTensor from nncf.torch.dynamic_graph.wrappers import wrap_parameters -from nncf.torch.nncf_network import ExtraCompressionModuleType +from nncf.torch.graph.transformations.commands import ExtraCompressionModuleType from tests.torch.helpers import BasicConvTestModel @@ -110,10 +110,10 @@ def test_traced_tensors_are_stripped_on_context_exit(): assert isinstance(module.weight, TracedParameter) assert isinstance(module.conv2d.weight, TracedParameter) assert isinstance(result, TracedTensor) - assert type(module.cached_tensor) == torch.Tensor - assert type(result) == torch.Tensor - assert type(module.weight) == torch.nn.Parameter - assert type(module.conv2d.weight) == torch.nn.Parameter + assert isinstance(module.cached_tensor, torch.Tensor) + assert isinstance(result, torch.Tensor) + assert isinstance(module.weight, torch.nn.Parameter) + assert isinstance(module.conv2d.weight, torch.nn.Parameter) def test_no_cross_forward_run_dependency(): diff --git a/tools/extract_ov_subgraph.py b/tools/extract_ov_subgraph.py index 022ee777e8d..0739bfbf1eb 100644 --- a/tools/extract_ov_subgraph.py +++ b/tools/extract_ov_subgraph.py @@ -94,8 +94,8 @@ def get_nodes(xml_dict: Dict, edges: 
Dict): try: attributes = node["attributes"] data = node["data"]["attributes"] if "data" in node else None - inp = node["input"] if "input" in node else None - out = node["output"] if "output" in node else None + inp = node.get("input", None) + out = node.get("output", None) node_id = int(attributes["id"]) node_name = attributes["name"]